[Unrecoverable binary content omitted: this section is a ustar tar archive of a Zuul CI output tree, not text. Recoverable metadata from the tar headers: directory var/home/core/zuul-output/ (mode 0755, owner core:core), subdirectory logs/, and the compressed log file logs/kubelet.log.gz (mode 0644, owner core:core). The gzip-compressed kubelet log itself is binary data and cannot be reconstructed as text.]
#ǫjrQ5d}岁jkuYgd`H2;H'ihݬh;uZcG!~a]"W_̀W~!VK8+EiPsJ%̬uadobRx;mP`I:pKNWik6 :XE%RlTmZ<{^=YI=m»YI-EvzsB/;~iT.[鋐Q}dzwn/?e5^E Ewҽr2i_7?\^^]t2Rf,.85ݾ5FQm+M5ĭ trU4chI}o~z^< 0}/lF}rEŝ8{ӠߴWo2]dV[׋xv, լ~i@Y4KM8>G1ҭ{ttUw?e]wI (.Q=Q]CǻDixاcދ`feයK{A+WZ]B< "w*aVP}N[ GZI}39#I$THUU1Lj):] !;,Z~kkJSP|t-$sZ[cwmI 6 ~?Gvb]#N)HuW3CQG)qUBH"I`ǙeFHq]g팝aChұ<ؗA %Cx;P'.4tR?ޤd$3mu3l/WJFqA͙?j}]%H'H[o!ADөʘ&!md)THq/kngW;ཞ .6,`Oql 1’ D04 rͫnP4TTt+JD"# FT8G"W BJ(2vgRϲ^6_a:}@PS8_KLTĘY*v,'Q6A"A>,*Э |Z8o Lʊ!+Kp< K 5\;uָB=4E (IQ)"5L.rOp#\S@#))ĢݫGwpƶ{gvt\4ot>MՙashMQ;R.m @ɎD ;&/lB΃M&ۿvhbxzzg!Hd@)xt߳`Aux>țdBo"aLQ(c*|JD!8(V_kUЎ8y\TĄ A2 ǠC5(K)HzquN 9ߦf;E32EaRSi) :lr, CX2i(XC$bXJb$:_u)\w>1dQ2;"E RrHd 3âҌ6$ ' L<~c+{>f= PuDj/"[0=% vրEQŕ8aE#_B؋ˈzW6O-T %Kӄ9a T&8b pa4"uHkj o}߹{`6 .?G\:q!P pw?W$t,Y1K2&.Vg`H2L 3igspȥ~Ϟ"蟿*d =fr+ )Dp"Ibt.p:< Cݫ*a Yi kiIT]ق] 2p4IQsbv~9/uqe2T3{\VHttԟ]^?`HIV˚Qt$eҪ;>8Yv3#7\/Q˹fn@FNh10:f5UcO鲩܍, PDFQ0e3ǣu=ӛ>e䕽p zm+@2/^ku$Qi}לt5WwMkӵrՄW.Ǜ7Y(X:j_.丮_?9j®x#_+ET,<"WP.r>Cb=G솄5f7|hzL/?Nϖܩ8F޴:hc?<6.2KV;GuR'fzx|#pM8+mYو1BqI "esv:.^7uH[xMhX;ɢ Y,!žy%V-5Dkqf(圤 KN^BmUrg2+*jc :EYV/oPHuoU/ t< uV|T~Ի10\8"P Lq{c, '1beṔXfRL|WFf=9TE҃<-ZZ5k9P6آFZ'Hu&eYaIB\>6AB!gh՞:#}v4Fgxnʡr}9o>m(R4tT|>b풉 6o$g5MK_I:i @uk]m_P>C/߇)wynE.|ƅH/y4xtw{v[B|5]iL$Oo'NYz, 2r>59>HjޚyHu~ f k } q>WOF슥K/&\Ô.W[ɶJ+jJ)Jb+pt*qAҊ wall;\%.޻z\zz|ӊp4K{WObWp4nzzm\'պK \, .%Vr "?*N^- XrH#N- O'?la68wxl>Ô I)X,= M,ԌYSrЍ e!H%umh< ]Ɩ؛b 8:M}V M10mhC1\]oUTK-k[J3R2&HqОk`S$1;e$vx-At۽%A{/2f *1,UbW]+!p|W \Q-Ғ\ÚʝĮJjpEDH+7t8WI SWf2Mb 3ͅtT~S3NT=.߻ILJU],pcb7'*.@>HmFpP,]<>@\cRYU邴( mR#&!\uMo2 *SlG.]_LBMnuZ׸0G퇃K\x=ffgTyzdWOaXEZ@KC| BE.%":d<2`hqXx>{@ypZuσP%-'O6 @lRj+\_i:i_'}t+Э\@WO#NV+ة9XCY72$Q9*H bwAqcԁj `KVQbFw[r0biJNcD2Y&Ϡ)#"bTb FQ<)c"gb錝&%`[:-+D̼\p`W_XLAfu^] R >XbyM{ͷv}e繬60ip0olX s5gA+ȗ(}68V$R'#Jy URY,="< "$Yx {`x` KW VYAx4|@ 9 ڨ0@: FQNKp3"8%H 6ZS"!wK9 Q Ƒ3vvnwʹӔAri ;oW 5H-&#xګ:x`PO }ꥴӯx,`acN4koRr}m (@Kc,B{PFO73h=|mc `'*ɨ Xn#Ir] KGN' ܟo{aywau(( qKX!p-X`6>4FLzc0WjiVV]z۵hٶGCrcINQ;}Ah Lv/uIFIt.+MP0t_'1N։ U_8 n6.ob0XG~‡U̸:swh}'l0;C7_izgEǞYe0R.ˢKa2/eԪM ںWC9 C}aߏw#HSux> 0rfM*`SD S!fhz1s3U%B 9rx盳h`>X9BZzOf&UGuچzYn uwǢ Mԛ(9y7//.mHʲNoM_JeL./)Hyj9Uy)2AU㴮Mn4&&Y7* |nO2Hl#`"낿K6`YlX.V`99dJ#Q0Jd{FTE>B`]3IAcYvxڙdxθN|h*fػrLpWTa7}f)ϟ qyGՙRǩCU.Ӫ3FꚀ Tz\7h!m|;(&bD:#:d3'NK'.GW3W) ZA` e:'cpV:cp0Źg8JbQz7oD[ɿώ:gKEk(޵q$0[`GwU0^'w6]Y/Ф_aFE4`NTU]OTJgqI!Furؚl^W50n7Ӎ1~c\jKo^@!kRyPozm~Sxm`6%كg2%omgsXksD=Lor;DE°,L$ޙMOv GH"P2yh!I-]Ei'o BpjA #)enWCҚ|$U(2;ZMp)4ĹVGʛ.MrVX*YxLYO'_rLgY~Hu}=usxy꠸6sf!gH`TY\1h hc15mr8X$JE%$C2봄LVX$UV* pZ[D΂(A ɊL D@bWH^h*DE"xΚv?df;NQpPԺ3V=)LWXD 'SӘᔊ6`mMߋ~?RpeBc46YZd˘-x /VR&"jI9UPJ!N:I%>(sp)H%EcAj-VH*&砜Tt2h8jiN츴'/Ip|?fE ȄDr*2  Z QlL:P$e ݌v>5S]O565U0 c Ht2R\xSVJqǑ]RkFhc@aJ l'%@&A&X8ePӶZw~SW(VܾLjXR#V-:,8׊'02#J!g,\\QL&iԿgC9hg1$DdxUQj0!H!TI.*!C*$U$C%|f `Hm ieVܧv=wwgcM䟗f\?q^ӬΈb4PˇϨЌ&jzdu#)~tFKӕ}Z09Zm̑DPRG'ഖ:|N}:NP0Y%NT G»J و/C~珢b{~SV< }g;U/qϾX_??^{|P43AԪcGi+OM#@-Gѿy8!f׉ gUFuFXt[i{M?7;,OSV?߾盗oO߾[?zKu=ѐzsW~|n4_~ؾiZ5hiڵ|UoѮ-y|>=K7 pIl*%z_ɋ7&GgtUw=ɣhɬGu'nqJ4;ʍT]]2 4# TfVGW"똭©nZ wo};M3\<苻_o?cdt9 o7W?+|f(H.9ҥ)ZXڡs@r:Yp1f8u}vdp&MkO`JIQo9zFiuH++~hdj$_y/>C,[0!i|-uj M)}8^>ҨKsC4-:D~1!SAFN}P?_&LSemdP>mD?`d^;: PIWx&o˟G{@zr ]3nTB\6nY!|z:$okg]2~4%5mVnSFYųe|ųZdu9/&n:x㗿73z~Y3߿!UyǓvF$ `%sBd}aHfog8uE }=y|i@@ ȃNEx|6 ]]z __?A8xa>>Ѫ[:ZP[r _h;dtϹѫ~7ZV8_F _{}|Suh8o [ϦgWM4OQ jmqrJ;k:ޠ|gtiq;gN9bWŮjDm4gr*&0:w >gPY%|BH҅@R+Zj,b+T(="bХuQfܿj%;*ͻ:~:qj|5XMq("VVm1ߝWq_p +:Y4@\ttP!ٜbdC8ίuޡN#ԉ{: U\@HE8 t$ rRі1fdㆶ(9[ ȲPFV.ƐT"u2^o̵Qg3qe Pb:ja6`4V@P)R[[d84|ҡ3B {mU7#IP|"[Pg+LA%D6Pj*@`jlقw3$. 
ZOIbvdU/ }_~}w׎g\σd|ڀjTzyqւ+oQY)LĨDGUHgaŁP4ĚC%>+j8$*1QQ.Z*v=0!ʁ2bPv 5l*Ĺ3*|a36f n$϶8x}yObp//x<0>/~eqZgQJZDI 2/"M/u.{!+nS;HDg2(pl'8\VƓ$v RG+qq:ɗB̽Ac_>xJOE4i &g`*pJK t`ul")e)ed1GHj#&*k`Tl,:ĹA}y>;TxfG4=9xăGK*gKN>)±(jg5]F d۶Ǻ4ccFK_HRL1!GN&L#ie@t[1L=SW/λq\g3-/~/]3DzPm?QJ#:!Ȓ{bBhx6ӎ]kn\f@ˋcV{ :.(l-ُOhĊ}t Hʔ;L1f+x(Ӭɣ& *?I;U m1 e тw @Y\}Q!4Iʹ'Eb`4^˔(QXX I* l ŢYt|.I)'R :g]:6 dkf ɅTN bUG9"Aj MNL* :(3Ϥē\RL96x{>ʁ-8yoEzC!29['ʑv&|+zz|1M:xvՅ_r;:4JHd9Fb$W xrS5L5'}4(*EG,[d=!>) |TRu=Ϟl&Mŭ%4QF"T ,"2&LIo eP5Gcʹ[KS"8uPB 4itBd|z`<]fL3|r5 ֬1ƸsؑM>> >ܯ$0p8ܯE\%k`s)rܯ%@'N̥s>W`eLd 47`md1&ceyk,6io,IV&QbGru`"ZAU t2p̫G獪Yl.}] Q(~Ju޶R';>"1+BI[tCʹi8;*]lil+ ݾM{wW>#+[ԼTr|(?| r;*y;_/yBsյ^o4ϋn{9jfWewPNI̭3%Ȕ(4w:7o*>-ߎw}#ϣrt Ӯćt)4:Bf;D$˽#G$ %RKB*),d\xH<=ơ?c~Njgw0Unf b:#*K0ܯ"SJ|PCx "$MVx:02ZLRbkK݊ .St)Lqmu'.~5[ivd˞Y!K9?B@ $ٍe6lќ C%R]{[{ۻ!l|D@:ژrOoR #,S*;Co/$Ϭ7/~n1x;R+(v܂o;~~cm)X w0m"~[HŔ:rΖRrUP"_,Rj-WqΙT^ݿ6F罷?Cojab<~KĒ}RK5?Lbo|q;f*Z?(K1.(@fBѧ![v{FKo4L7 #_Eʃ'֒/{i痃t>- YoCլDž]<kꀖ:pL]b>iNq%w!k(;y09t0:4Qs6*9q"2.504"id%r>% ם U/e-Ŝ{cAEe:Ѣ2Et¤c2aZjUZq2xRǓR+Ĵv`"IsZr Q< )*apTY8iryecm2'V!mΐYKN7[5{oM3k 1nKn Ɯ0'p@y>$Tճl)ǭ|զCOTV>1S-IcVhI@pBS" G. $F1KPY]^z,">hGP퐕CF#l֧"M[dcls>HCBl#gjT|ꏕPh5c9ihV=l$9TIPc]]yMj<^#N䲑Yg}Ixɒ6\d>b6Y|ki 69HSV(C$(%.C A!xmR%DI3:Pg˜؋yOfjG'k^]K|B޳}6*iER0oAj|iM\ r]C3 U9R(N%schfQdNS `I"=X;\} ~nE3HQXdqh͸M^pjjF| 6WtIF_XZ)D+Iyf#z*0>16c??4̟6b+ޯ7Pœ >b@[p-bb*9;:1ZJ.Y|;Oh<;J̞%4rt+.1+eƦ5: q H?~l O>YmL6I(-O[ܷ lhd׮D?xwQ#55x2ŭJQotM#ۮ]?.i8-$]|RؠCoOҽibf[s?O~i>x n cWœaONc+.h hޝbpi[גk[u{Kgqjm3˓=)"]h(|4|YsV꼓Z]WZӋVBiHiX"/Ǵ~^#MocJT*4'gZY8}܋So?_~._o~ _}|[uD#0IuZ'h>TGbr#GzL4hi1[S(gvJb8?y7;!֫dHdS Ɖ5_iW9nR琀#Kh]P:zDȒŐ9WR[eZ'6j!Hݳ[G^Zi2w@)1Ȝg $yKPl)omF%ѻCB핿r9BruU:]nnldKiqi5 J t>!JS<3p!k a,B0%6LڱsIVfeާƽĦ}Ԕl{Aۅk]=+.Jz{ƍ@]o9W|ڽ(v`fg?0}%$bwKd$˔-'8fG_ *wӔr&uΝAW:|(zj m/#Q"i͟BS!(:#Sh_Qy6 ZhˑܟZur`/xn4qQ(ZצQ6:l{MOK#9 >qGNOƴLDи 34TtqQqBge1Vh%HC($Nר0%X1$R*Ғ9%c9],,2Q!9eQeႢܷM֌;ܛu`k? 
݃p8^Ng_JXR!s )8(4r2DA3`X(*cqG j!{60‘9bK%8 A1TBێD02y^'-xL̓`KSڼڽ=KOrKjEAJ ?3ʚJLIF^*$!%RQVN цsd2ԣ"9kǘQ3jNIli|b얇Q?OHbP%"V|L@ 7jJ }R14j AG^8lrgшNM򠀥,g\p^CQRţ-i[ᶜ/}9%Eĩv(U\,J\\|:hŅTREnjZJn&PJ'+{x x*8TP& Qa.麟xU{!1+n(=`mp`mMZ R6^ GKYiu2J3#+Mp1\ѠǓK&|Whs*1(dP1xJ縨 2ǂZ蔒PbF+'F'TYC(sP1@sPhh ɤ Q娀^y;@Kuʷ֜J 鐢ԉ&9ȸL@#1Qx͡it4ؒAg&/ ~ܢҷ-_f3/r1^Lv,G=[$=w:9gU%,Yނ6D6gjB~hLIb?9|iە$Fmƣz.Yv/wa5f=mn.goihmx)/_f"J{.v6\Bh[-AAUx4/S{R,XNykiT"MJ@J]eND')Bbsڰ'a4qjs4cNP\^zyt^MT4C4@ O^Vb5C)c`<@!/^BXHr<عG?jO(d &Kqf?XxMYs<Y&<  +}aԕ>cXA2\J$BP⭪+|-UC;2$G_cjb~>SOk}>7foU,;ykusճrZ_Vfsܛ -L\ʢx]?S5דFu7m|s)%„ыD5=NQp;Z IkH o9SZį=aR'r8_zT~ycE?|mh(&THII ]?uyQ?(7"Q/7BB -`Y$g*|hYM$0il,'KɩYE@ё h_rκ] b H<1Ak.$0&kѾZ(41PCD!P1KLa:O 9MƭRIolme%)Ĩ5#,hac`1~1rͥvO8_"fJ.ѢfcGMM+lVWn#=~FlD<mF|"~ P6Hh!)jcAwxpV[8Q 4(CS0$!S=USNS @F 5 J#w %' "h#4c!,م&6Bޥ6X|jA lJuBΥ*"} ".V&bI3b'U0[c'cX,R"$LIUF@P,kwȖNpjIyx,I=t#[#!إmegP|!w-$IQw z}j<NXw`LS-]; QWA >FO._;Aϟ/B*Iґ $ki@:]?ɖsJ$Y8Mj , iV76>ۜƥTD(;[QMh'(0൳P|PVkd4Jc,ѠP$K6cKn2BfRzWǓxųW_ߴe3f?mWXҲe9IE%2|J$VVDTu@DZD) !AQMFד{Vz^%`R%I2*Wp<TtaN/{"c-ךROmZCR~Θ [hY үwi~}}{gTnA>9O~n&i#|aq7=AB7 @ѳ\yp=ߢ\̯Ǜ>qŘ[ϐ۫fv~7$kl3a 쀈w#| ~LJ)p4q?;;wQKƌrXw}0Ɵf\fśQx ٫Xx;MpiG/j6A #:à R1&,JW5PwE'n0tY9ߴZB_((sM7&(VNBekͬOdX?ǝ\,wI✻ !UM3YBQog$cz'SXP#ظo1QHQ_;h&v\'&E|".md~|3 p0 e7kpӷ8ȵw_GFQZj_[\!+(߇Ia(b}-6Ѯn񅜎${y=+lZ!KT0*Z<*VLk*qU$o'=q`z)*sXaQh#U[HM^JbSƟsY| N.Am@289ǥ$!\>1 L2gSG'¿LIEx* u_Raa簝]6ɧ:;ѫ#:zݼ >2j@/z#zq͙=wmm~a[?x&@sbB_eS"RNO IQ!ij(%qF5=UUU_=81*U`ݾ(i'S=G{}86|;giX"urFgjEK_״Zr/HF*=.6_*S/M6M2 >XOY좷$a&[ш1(젼6G MNB; \JJf "6\#&hu"GwV8+Tǡٲ_Bq;7a[TlޟpQ7M#⭵߹ŀǛLgnGũ 송CO.gjK:5kINTcLUD6Uh'D c2]?j^?<-xa=ӯ]zE9n?}0͛.o1ʝԨ{yCE-l~u[P= {5HmK\xF;Ԉ+[hB6'}`b+ :ˆy0[8ssZ+* P1eK%UѢDf)*;;ؾbxQ1Мq_P9J $A83:0*6Y*mW$m ȍ F:y>rl&Y'ڢ>C}`F9tJ`)*;uF%^>]\.<nZ;%8sոm^"}9B?//T712"0-칌mp0G SEs,0Zu0JzdFuL@VD0y G E]j8tuU\ܫF]!z%g}49~pc"5JL6la &O30Te0*Cz>H}rD.'?vs!VLi kk|JhΫJ+bB 1kwEц9=3/2!'9R'#z9f;yR M?<\݅s {LN@H|-pڥx/xm^mNOa=i+5EǾWWx?n !ͷ{E{w[EЕ dN&QcfLf €0IXZePx@hc{#<"kGcmrWp*J}Q)%뭭B^ڭɇ۠_[Kiºu =[m3y!1:2ඖ?|#OT{pRNtモv0e?.*/U3]2TUej`8Ȝ)$%u6 ;霶LI >Sc.Fw5fBSf.,'D %Ȱ\bA@v]ͭ3r6'N*_B72;[e-K| n惛/ >Y-v2k"ϑ!D|"+ȽI9%!ߠ  yV o';޾Nx󧩫xNg[6$Xw/2-2]ˏSOԔYzЏ Ի)ԅ ވ(ȽՙfO??gowEXN":''{ 1:(5$*dI^qt8iv}DzPXڳĝJ8X:k%e]Rn'-miyf1d1n +:%YaȁZ0͝geAlzi."B]_+%_g,AsB<`Hfl(49s8܆,#ȅ#.Qca6_x,e۰C8dWKӲ7j FT-v`II)!QRQ } %pdEBWIJWw(9Vb坃*N,(S}{M@y+(/ϐnrTU$%*J 搕ܻDnʷ`dyҀXi&3x!dR#K*9Zk+r,fX!=П0״~^ M8Dzrɘ vQRNCNNv 5jg; 4J$Wx>XSMmo"Z%ZPtֆl,J"_3J\QJ" sf,ir8W{aMbI#юs*/WY0&Y)2_{niA2Ɛy! υF:De %0(9ȜXL`69 Rӱ\WN:t4~~g%D+5L_G ͕ڵ9OF9tH XA/7a^`ߚ?\WS&q#O0|?L״`, ^Kvr\7 ?9H ^_b!qVӻcņK̪J6\ɍ7 pV /W+ng5t3t"OjMwU VȌf$luM v”cv~m!֔j ;4{^ŒcWs›هoWh&-18vmWEj_gun\QFfj^<:ghڧuUfyGxR(Ze'G79DM?Q咪pkbȆh =>(dZ͟pfۺ.Y7=lL&,fed ·{nF2Y1hҐ+¸ .F̂9BR JQNݳGG˄iZmИ}c"p4iNҏ%OΔ3&- !S*_3Bw9:ܪ{]|ɷd#\Xh:JҹP ־P)L߹+\(丿6;(-ԁ棪YHH~yλ. 
tJspW^z[eX_FV:&>1&PKkt!:2LEIx1&霹,ZE};'3l/a;\t pe\6rsP*Ηm;|g2ٹ3Q>`q?( ,VHn_eU s2F+z;n?N+vĸ_]8 ]bie4P4bdziRص1 7) zπŨXʹ5CHA邰.Ed*cVH5Ƅ kZi&r]:#g iꎆ>t|tr$r\_Jt;X?yD d-V(ucy>4>zLL;71/GtRX=nTpeL+?u`ucoP)crʰ+d >$ίgu:AX7;Θ* 萙c O,kLB&H>k!o\JJf0xC,x刉C&EV8+*ȵ[e>Z}Hf=[CFc?> GCd\C(|gmrN :alG0\fFaU66:Jc4lAzX6^"*=Kh MR ,5L+FAF-UZ$4e,`z3p!Ɛ4z qT$>68̚A9%n۠~XJL ނ[N (>(A|\j ޠ;5ykD ڤ4A qy#jI39L|tbO#6Ch5bP( EYbLM0K)[Pe5F)n3; ᚚC*a.lc: tyXƋv܋|/cw=!zN?9)~qv=m6R`ih:Q#ƒW1Fìj|ƤvzF=NLh3ӫW`E wPgPAǨ)E "s,hI)-E'f6Z{bMBX̒'\*L0dzR 7^0-d1ei!!\\0(:TE0*G$T|(WY'"pQDB$,߰͗AS=ydJVIF jKא%:X$|Uj_=Oϓ:.x̞ŏz8IΓhq "(u<%) Q_lԏRy4w 2@Q6 FK%x}Uf1+w㄄(,ˋP69i'GO:)3@+i=;8(Tv?hZvK;4wyY'Ke'-_5ו;$g+<_4g:8_Jy*ZYfU[8s1`Jaܔ4FlT[q*oI&ƽ]@_wC}aJMUnF%F|lV*z>mW<TWݞ|xй9x\, en%'Z ێjzvO8[^MC[,эһkbŎ:9ޓ:G}#'r^P_jwޑ ˄o_c]71]afˍV|ö;eZ6BlDvsMV3g:FuzUS'"^YAy=:1a &-EJ V`,/FvfH~}g mϸx1@RQriy6)J؀>S#98Mc)ɧTuzE^>oF%<_oMG[W{RN~Ts)W#L\{M_᫁ϛNK`"نE/06}V@cLo>fn1yn1%c-fv<*2m6WTEq` W{ hPr#U YDϡc@lO +)o!(Жk넥>PBI=J`$haZ̜=ϯY4'7-9՚8"6gy"׎" >rJ4K844DDu} ǔʫX lXG\4)c$pwzq˨Bd )XJl3g؇i%PNU;$XH ĂhKJ!G7ArCKa9zDHck#mJuD yE$aFl, Trc4):F4p`()/Cȸg(Fd"$n(&a$@bc#c|E~1־iq>\B[ypbP]|@}_PaA_<@Mkr`LS\3kD~QVGc3L%S#Sd#RfP·}4I !WD*H&Q#UFX#%Ӏe \P efzvMvCН.|hjysB~t$q?>" ʃghIhhlDZQvRVkU*kBLT "u ɩZ3& *yRZ~9G8+l;r'1EG?W"K4S#UC48u7R_,d"5i xFF)#!AQǂ'?YxCTNh'jǥ Q 1„a sFxQ@C'cMܡ) ڠ OKR#-K8xwFw<|H IOU~M3*"1jV7X5ꤓ,E|_ &?ˤ diM }N{fv&}\gɿozG ⌀3:0hq| HiH9w0 IegB=%>EjZ[v*'hc뉍:\f»\?Iރ߸{oɗN).Ng8,\ѩ&Y06 w5}6M?LgYo kvGuş@u5l?UpG}ymw#8ܘhć'>S`ab7S &駓Iw ]-5Z-#|N~ۨjyv"6,`Ӻ?zqٯk LQl$<vxw.}TE o9[ZױŇӰG+w>>JC(_~zs¥ip0&Kc4mIx1r:\fh}J7|*O 'AKf 8|=5$NyℭSg_l dcN :#1D)$cO9@9d6BJQ(\fp$)]^߶IPDl(OsVT9{@o>x5z#l-&L[KO5lxHrZu4!, ,@FhIl4 u+<1< iL'zhCL ã^ʁ)(/0, P(1BJ&F8Cmah,"RHV'S@pr)*gF盿r~* dvTf=_{gbj wQ|eߙ֙a?zvl };մN&,[\BB)p?>ekEK7]q;f^o XռQr{ȩ&];gӴ_rjf˔YGVx-뮹zU} YRU08Ḃ Y3eDLĘȭ[+5)p3t7ڶаKoEe Q6s&hI'k~ ]צE3J{qr 1m\26<}iݍӗ®X~eJ` e@u 2xH9&=erí\ Rp2Y P?I^xwy:+0Bz gZq*Xr(ceU2VQƊzt)QUdTUΪYU9*gU嬪Uu %$ɹjS7giwVq#D?}9B^.!Ooj И߈8|쏫+ד[o<<^cܫO}ܾ|߽6#=lȗkWw4|豮i_G{j9g5^gUʞUٳ*{VeϾ8JĪU5ʞUٳgj{H_!v/_0o nd/_F-X$_Ť$Ւ({Z {jyjh{6=ڞ mφgC۳lh{\֡ڞ Alh{6=ڞ mφgC۳فTʁBjT,TTbi^J8TQ9@4 J> XL1vr x ב4ӥrW_ӗF̤!3ikf5=RgQ KEڰ3EhlJė^F)-<RG!螓ɱ{]3"ț[&v]i?Ʊ>э9:>GnƓ;4[:dK j |\|+?g+N#;ڏWx?_Gs8~5i\IᱪuQΧ+xFΝ8J7+,NwыWg\|>:^3VH8*,m'CRC#SRG [?&`39*Ct B!TJQ۞p[JZA*E@*XI 06bO”(6},H۔3`!e|)/}F  *;J%'( u_3qGfL>_C ųㄡvG~w1|ʹNq'gA6cAn1fCʒ]k"^YFWjPep Rmt=o,|%wӻǷ)[ 9;xoM XZ}bJHeʨ<;:[6A,-۶:#[e !g#Od*HPiK[gwH erts#c6ΓHF |<>9^]אxQĚۑJ9O#J(IZxVXaPXc)$0l n+2M:䐈b^\&-COI5/ 38~̎lAJl*FflGvJ3,lbnjGµ<3ް=;c iN'>1b;4AE6Nc @)i*T0\Ӟ:@IGHm}Y4A`/d%4[V6d!ZȨ|FNpFB׺k+q#v`΋ 0iJ6@䟕gg-D@*LIE c1i K {Cʒ4QE$ UG(='2Y`8o+0 "6ӏ}#" 8 ]_ ur^K'u?ekQqP+XwZARR9Ũ(݀ʹc_<a +t_UڻHp%y(^)Dݜ8:[)G$ost*Ңxu`tNyۥsJCݷJzTGz9%xlA{gKȥʋ**#"_ bޡCL;bXQr: Tv !(Q{%^$PUUE,l.qLIRw>s(sOv>NOIQ{=ժG-﷞R9yU& HR"P%}ʞA#EɓYC"< "T] 0<0( *hE%ai ,CA )HA&g1ǯ xs/:WSZn>c{C~[ Qxxr>FkCCaERDG;i)x-Aa|,O4)Wt~VoK[o?o6 r9cƮ˧X5?iM6mQ,e4v1)~oBz_#>8~>K3$Fmב˃ӷ_5syQ.o`Fd[|s&̚^BbӅ=z1|*o4o^^B~brpF9B\uEEEѷ-k/4bX+!s3~grG'Oח:׍MG+9XPB,ț\ǷxBa~#ig<>KGaLts*43Zx{u'}i?;Yjym\gH}$R!qAu'LCgJ^2FK#;=Ȭ`K>K<צ|gl/9p}{Y>;~ ۀk7TSY<{ywCLe7.ϩ*%'=el1dKZQG|)*a%-ǓعG1?U*.b>*edž9}Fszl׽0 Â8#\\`1QYBl:7?a4[/gGİ7s{gxekgI8~5bm}_.#v'{GDTzOiCwL6!#Q1;uއXܟ^NշW7jv~:aorץEP({#V DICJG|1Y K|E uEcaQJU/9,5!Xoj⤙8;/e = _,,oKY=I)ۋ3_yhxLeErlWRZꢸhv6:9F;* sAK;#h"AMr&6 Ȃ;;$RXsuT&#CPF:TVD* Z6 F^vHjK{@y ڦ8\)謍Euc;k&Ύvկdf{)l;?-ݤHSR4W'Qc-?a8IC6>VhrQ&8K 2bC}L2.ji!]6}lO/.9)hpZt8iiMFHZMA2"PlIZ(i 砬0TtMHtbOlLB.vxeZ$G_cLH'cs"2y\F-P(T6I&J,ʐݎv9)]%DOȾƧߢvc 2!s-jK 2YR8.)5Q" &'`cuԷkuoqrZN7Se=*5]C;9 $ ܸgơU4',W6"0 tz!0e 4Q (`V}I؛Uwȴ< "Nʅ 5Y 6X52 wYc܍t"Nq AHDր/Zb3F%1ȭ;7gܫVf_o%/$\l(%1dk92TJB"`(IZU@ic/2Ip1,TRhB4S <9e(զZ^9 = Ŋ0ԝ 
us'GȎ?o~~~wy}W+0àܔFy׆ZCs#Zu9fj-r|+v{k%@,~;j g!8_\V?bDx XWJ<Z%LD^_8!6S:Dkۏwn/A$ި# |0v+SHR"x Eo55F0N=wYZ')Xң1 AsBz KW'Mv޷ʤ :`o4V0ӽ 8&#XN:Ri7ziIv (0H@ 21|k0}.\NC+}Zk;=6Y.MvH!:4!*(QD@NJ4٘ܢ%SwF6Urjo3NbW#QPCݥq[ *dP\a~Vwai<94NAZ9~ |f=]t Es1{sK6YK^kViJ߫z]zrMUO1:zq<۫^H>eu^QwPbQQgRp'8!|CVamIJE9+YG9+βlAM/-0HT(J2b'ĨH9К2 2Υ :$IuO$R.5Ƅļ?J9w "Bz*> ]>㾥(̩y :?P.ol]kו^żMŔb cW6Sĕ G6O[^j.4lWeyl髧YsQzccWb.bаrzWโTlA"d^?q5q?;+ߧlȁ`|ޜ ~zvؽqM70jϬ2HWUxN{{mbJ\f!EyFQ3NJ s14<9ߚC5;k#ߩtyu]}˵n6Gl*Uv *h\&wEԊ22RZe0p9w`JU&ww*SͶURFяW\ZJU&XѝLWZ˶JFTW?"\jy Q||<W &HۯN頺hGu bg<+?ߑg(!q)9wєZpY sHK" H,tXދ%4j_q"wzͶ?>!+6/0BWuu .7i^ۊk]!~l0/M޳EPCD2/"T 8^|Tqѳ|TqKG_h:_z5L,!`_r>ChhzVTǎ?q5Va*x4 wm+r6x'+a,kWD+IkV9[ZR~K}smjuw㐃2ۿo66W˕֫" ܸ C.P p\R&B2NcWW3b7wi'@%Hpb&F H A@\VcEA>hHFR&טj: 4Joȇ,٣ġ;OfsWC72eOOFEׂebJo 0'sD[ܧPHnG|ɓ6Q%@s"|4X =h୍c!TG`G<0A42[OgXFW:nJ{Q.w6lv.kwenYmr+?qjBs,K{ލ_; IQ, Q&2h o%k G${6BJ(8 :d-?;霶D@1|2BҘmBeАH9Yj֜I-$gJ:A;S) Βԅ:#,dȆqW/7[7Zgv^ t vus>֡]3߲+$2i"MMRZkcՑz Ő8gEar_iN16EMuR%};X`͝2K37qܾY8GT.֊h5Rχ bdRJaD'Ҁr@7l~Фzn-Ja"F26e%DjAQ<2 $-@Q K`>nm1H m 1 -༷T huAɤYɹΊhc2JmvD[Ql ǟ%?Ep,£h%7&#ךбvF݂ Z \Mqr-I; hj%]$8x>=| Z4 ^y}awyuW2]^ilZOf n1Z?Nn-G5N3P+a+%*ipf/9$0ϕCgSdTFI'98e=L00%YtMJi*(3rFtΰ3 yXz,|T,\R6?͚q˱gX;]Fhv> Ѭ}J7MF;GlcBiN$ (!3H@ q&>Zz\uǙ O!{.2Q9˰ɕJx%f6ꄶAs ª66]gz슜Oq +=jm!yc#:p㌦B)'<,*ki-dZ1$m"IQ&n5تD*DmH C@bjBr^S t}!vFxX ۂǦ(:FD#bq LhI3C9 9]Lj9kxj8+ڦθdS\q8^$R ҕRe?&҆dQ[F)0DpMMۂqǦx:C>-^nkv>qGy;\vpQ2)%{Jg_9RܔB%( `Ihm#}@+هF0} lKkнgTQh<%_D̳HR-EF&ZR \ԩ-sML $}P]\0(*s3DbKvl5tFΚNSBy2|=6mrл`!=fx2_+,!F@W$BR#(K,6D Dm&"@zDxDȼܰq =0<00#+0ob29KDE%Z:Ry ebuQÈ; # !~)[JPGwhr ;ۻi[(ֲF,x<*\%FZ.>6i9A"(Tt\t~{zzސGdN2ZpT3ӷ㰉S{q 18;˽ ΨRy:A0|x%lA׀cc>xs>0xe Sϓd>"08aEۇ$˷=JczL/{o˪E̮t8de.um yAdGR yzP&̿s'$,Bs#F@L!8f!:ₒ6vGڮiSH#[E|Rxh* RxNf/4L%+a3r8M0%y^U5S%/6U8V yIx7vJ H==Abλ$F Rm풆#79Zԥ.7rRtoY%]37X u m]jreT+rj:yy6$Yh8xx1\.bv>MSlp^Loҋhvŷ깈.繖Hqzry#54h6F̃Ik7[z`T.{hnȪrͲ;l* qݼp1 8gD6o#j$dS B{޶, /34 3̗muؒWG`")Y>(+e XvU&Ȋ3t3wŐG+G)MfAszr* ~i[\ aKkMsG!9GPoY<]la*tٳ! utpx/H $'i4 Σ Q5;ﵷPBJL10rվawa=ۭ^'M1r}R+L҂!x\ƭX^]!Z V8}(|v?͸ ýariJG$8O&ZΟ|7ڱ1oYīwϘ` $`y%2H"'%қEBl0~[ma3S%,AB%j !-_ER9D%T'Ilu]&L}9;%N|c;uۮd*vmb/Nf*l6`5h a4b &BfuP} M>nE{vsߏ?}ȱT1l%O [h祗-0E S#dU"JFgaŸ2kn U (u7у#{BmŶ4<{ 1Uv7:.'~< *'ݢߙI|qE-gcWT$d{,%"@l,GEdgQfz_L]m[u(])JY G|i5Z1s@݅K}T]>G]yGn ZhbĀ+kKJ,UD /u*gsƴsTv$ 렌BPgTB% n= X4a)ڧRxTOGKgyt GגxC2HyKjtJ)W䥯֠ 26SddY6vBqu]\r1w JEb}t1P&#le^I4#殧(wuD=! 
5N1@ARBbXdJ,RG"QZs.TȑF1,+5B{4aI0 hm W!$ &mc: 5鬢?6K@Ԝ^{̆H(לT\GD+N}"J@ŘU= tKo|߃S̩\>F TT4QjkNUyAo8KՙI hzBGRH{o0*%D)"3 =qU}g8a=8U5u4M4n5TPu )I,C.jN4u( ܁&驭;6CWEO:CMuL|r~kD45*q'=1 ]`rF!q!1D$ߓGwP}j<lJc?t6ɂ58, b\渴5R$r< {8<:.ut 7#)E]"%rXzXBB|; @Yw'f-(3l<䒣{3Pޅ&R4wUPɓIFeD.4n mg<TD"GmiDѵv8k&-|\=[Ot3B1Α$.*I.@P-@ xt.RT#4* Nxau⠱s ?0i-y BEI*H k4F"eFz}Rj L9뉈 b=?hZ1x\ $2چ?DPB"D5PaF8'{Bꖐ6:}2)/o>bqaP+EhU9)ӮhD9i1R%FA1 srw^D643 LmӰilfY>"!F*;Xpq4of`bqJYsOU6kZ%KNrHqFP JECZ:XPd+'Y?l0oapu?%~|u9}S뿽>J?L!`8lai770[po~n%jjo>57bUn6|yr-]ʠlߋw$* c':HMYz8JQw"4$C82T2V,Ġ"Y&ZvM'PӶ?'I0 x$1+# nЖwcو#VX/Tp1IRɕ3jAj}ԽQޑ94E~+SHR"x ǾEo55F0N=wQZ')Xң1 Asz!^+wI&GQ/>΢s:`o,V%[_ 2 *S}K Fz!PK[OꬋJ@ @E*jޖI`^i?jvI87"&aEFRmAϻ"K$+@)DF2DEp6H)QS4"S[xڷnCz"H{nrkv+.8QeeBJ9,Mx~o{peآdbܷ9@e]e~!<5Z[̲(!֨ҢݑJUgRؤݫzq89^Hv]du^ċ4"๎(A=*'0F+Nz=n7)|SyIJG5+^4Ea9@6J FQ 6F(aX #1*rLtRfSWH^$J"~sZcLH{?L+Mx꺟Bg<D]u>zǽ̥N_PܟѺ<+L*&doD\(`"y[h)<~8lhdƘ6}>gg9q;UΘl#X~hbgg`č ϓ:j$99xW޸Y­w~?BnnZ<~x6ZBngVm%7"]T):%IvCIE 0E,\9Q7Q ʼnvMfx²y%^t)BnǢk>3=<8>}zfvy7])5v:_#+=;ʧ9\eo۲>@MYY_EHv>Ԝ}aF_T끴g_?qv=9#h/EoUDq 쾀JҨ􏳳'Wp'^#\&HLKXPE./ٹq @sK]uID/>K''g|8϶|U))dm !J.>P%) S6Yʑ˴GnS|w|lwWdO:0u%__k|I<Q{U/<U wBy-Jg* Tg./4ev']VY*W8X'\ZRb 1_3'6epQ`qRB)lI_J$u)%QR r6*Nfu1_Vlҁb(]<=}tyqwn6ǫEy5cք** |3rӥFyдS+tЈ[N`Oqa;\ݷ]->|{M7g7WvhCۋ|iĝw<m֟ɾm󘘿8j,m,ȍW[\V uח/%k>~%aslX.Un/9 oڼ /oO,Щ9#/q) t@_N{n0aV\9H/m>'M)h>LSyMkWZ|FtϦN3s)ӬO;e~^ |=tgW`v=jz V: ?8O0Żw-r+^u}:6VxʱRmSwuJ+9Q6\~?~.B-V:=O/_>id5ߟ^r͊b(Kq5'/:/}&:7/fv0t;t{:c?~Ջ󫋳-Cu}H,5kS 9dW$Վ]4wUh=i뜉D510r~~u:~?= s/Nrâ7ӧEM?wʫ]vKcOj5%p2EKMj6o-Z6]Ŀ ȫjEͿO wE XB ւ`D`NR?ĶG[[~?rbVq܍q.~lqIu:W[EL6~t1= gߐ#Zg3l{lf'aN6k%?fɯpv;G;Ξ|"X_ΎMu K~52/#-r筡.9)^Q03n̩v5(Q՝1<82\kw~[hwcuw l:lTuصzjw%qQ;C*}e EI:Lw~LfCarౘ|w!I1{n#)?B[cO\zu巳i9IL-Be*ttpǗin/UoEifw_NqԮ vR/qo (CwAgp;vᬯvm?oI'2 grwedh{kLaXv+z NF -jmg[şGv͗)t$m#2ᆬ?T++ʸZ$;YEXXd,.HN2hGЫɫͧ\UHjGJMYE kO~ mw/Ns6dSq5ZMVDV:9$Wd6V;uW1D@h%\(CZ,۔QƆJYvqɒCߠr4JFξ[hHKUsʙl"# G+/!-&PE6JC%'9%PxJk( Zi56AbVcRJBJa)j LFG0)S,yS$"Dc!t̄XbhǬ&otLHR$5l&h`&O^_ (y2[Q ʗX+ F4*(cEWaF&1s1t4F!lM@f6IppNHo=# &/<߅˟b~Rd Id4ZZK Q>.FWD6f?7MmT%*JH(H%)ȌU{'*3@z29ka1c%»sR"v ESpś\GZGWHA:CjaZiifJcTCJ0@Dd/ Sô6sQ[8f 4h,Ɠd'͋Il >$%"lMP>IbF-#|OxqHmp_Tɠթ97Vdq?:d,TɂNvH(ɩwضģ5S"貒A";>lԎpk_^b埌;<ƺ!UkS4)Qw`҆phC$*ag.h Ut`QWSk@)@Zc2DcJI h D;h1#,tt kT%l`CR#mΛI9 H=k+VչBw:H"mR<2GF6Ǯ6 3)H@R$&J#'@`?A j@!oب WPnR\UHY$QRX/'Ϛ[dce!Գpgi& HZI@Y xj m3Uiګ, p/zXPKOJ6LP_5iiٲe&ڋ!K6-h8kynz}iW>,֑L*4eL[ʵ{uc fQ:ip t*"lLn ;4{KSp R4Hu5$޵祪)@UGs6fSޜx~1x9  mY.b0uB:(Ё')S !kzM Ɯ P!|_yE!\d>WIEr R&DyF ahaG!7J5Y1< &F|j g=:P)Z'`ޕq$ٿR X !{fǂ- BUe+llkoDRIbXpK]̈Y26UPЎE'K1 VGF u, (礀:Elj17'4{6/&~llr} ~3Y?dǃMR¨x3bTWǪXo" SW.̻1AyPOrv0{@js昃U1:|1׃ Πe:B;_dfO Iyߎ./ᷥl~ߩSEx;8f}ry!-'MS#76>Ep9dM:W\, d`UXأ9_^$%/IK_$%/IK_$%/IK_$%/IK_$%/IK_$%/IKW IJQY:_h2_@+"Ԃ俧 )"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ Rzhz+^ejn'UaλϪd@ R.KGpKF%@$ NpBK:BʤBWDW'HW-XBtIa6BB#JEB$]y)g` @^@d:A#܁cuX߷Pj|i<$/&߯Y+lˋC>XAg?ƃlH~9܎+_yCoRXͦj&\v @elZo٭ w1X:~Ċ*RkDx^HrksjxF*(/C h 6h[shYi338{7Z#;CNpM 8ƭ'ٻ Y!,?#;ڌg-st췸xq;8u_j򴥣 >mf[ S jX0LpO%TC=TCQvB]iܐθ=Ɍ#DWHW() DDTt( xt*5o+̝HW_{ Q S+焷)]9',է}+D ҕwZƮ$ dƮ}+DihCWMoϘsNчw+ep7HvC{]]=Pݭ:˥N96ڼʮYr'U#jK8/g\顖C&7sZe颽4@mq67XJ~QS^n)ۘb4n s$ӏἕ ^S+^Mp&OlgMg4̇l:&}&@ϧ㸚յ:;]*UWJMxk?,nzΥ?å|KӤBkeKm$[R+}nOzn(]ϖt;Е#zhs!p %DWWT :w3+8n{)F9Y@ )berK!Fb0*LŨ,G\]mx1xQ )b/FTr\U.W.v5%ܓ4Ҡ{zZZ_\JY&Es/P ^-kH_6yiN5um=ĺ}We"\Iro+DOfؚКfjN07.vH.OA톲oꈮ ])ͩTOf% lN[:{tTZ45`MǮmNę7kޜJT7ϛ'ٷիMgUԊB մJc.NfpM2th}DWHWFjgDBte@r(}zNWVk=PiNN4[ ]fyT zq{Di)$Vަ]9%:r` QjCtutQ%DWXt+++D} QZOtЕg3\kۈ]K v9Nh8|y'V,;Е'zhs!pg :kw(^9),d:'aKBSboڗۦ'y7}ki7D;jW K.*BtqWP)EUsU; Pm=R$qw Jlh{l('F$6[]!࣯\wu>wC+L Q*Ntut%b%DWXd dFw}+@ɹ%:ARJTF0k4/b.N6!.9դѥ9 n5WkQw+͚}hHoljm/f3z-W'd!Pq1yϧe^Z ay+MC1 
RQ>kUghQPuy0OZYCq \aX8 U\zǢ9<lմ¥8N}l@;*\1"~7Tr}pW mI`eIѱ߭}kkæ?\~ =isJv_yPjl+8r ]̢Yk3HirG.J_dܖ~1Z}{@[k~4'3 {AzjϮtsh1{w(jfOϞN&Ÿ ig1 WTxX^LoxAt̃ބ#ٳ|X,.s.?|譒8oN9yiO݉3f~=][7b&(rVŧC%lJ }}=E Na.9Cc(߉ d~4t zr8.QAMEYG笑;g8PS#xρp 5vWy,, ;6;SXኂD AeU2Ui}p&4#d`XMQDkqݞQE%}.mm70_rjQX&k5<20t~:Qč݊fP=p=at90r5fGU,jUߴ;*r_.aa^q|3kgYEtF?Nx6q/YB#x@!j毼mSj2ZfԿY guns uUY˨:mNף"꬝qк,8cqU͌ "2&dUFV[ QIE,k[spШXht-s,12TEYzT䢆ۚ:`{Yx][ s`pu6g)OaǃG?^Jtdx@GueE](Ÿ+lEx]3L^ ,}URNgMbC"jJnj_E]; b!*^EUX!tPZ\u`G=/;j|2?I1WO{>|;}uQɭ.O8[6,fiwS R۞M.eSG.}IO+Ӿaw:WzʏhZWEJD(Bn6`e5E _2qR,N(Ԓ XATR"Wת1ndVഢ:43 Ηp`P.BA\Upٻ6r$;wC63{b2G1"K^INs}-ٶ"Ӊtı"ɪzw[vڇM8Mww<_t?mN+Gl ڋGIzRPJ IȭtI5BpHT2+b$$ɗ4 F%.A&ێ,CLlZ܏a>KL̽`kqǾP{`9X<(1^py@ 0{76I`~c(5B&͐lHQ,VR ҜNHqE`Q δG]Zc{ H1ɘn\| \<⡩f7<|Ę|“f4I\?aW֍v n~2ޏ[w9`%Rh8KFҿ+8=JTGcVJ;V& g] I ! Nߨӣ;GtǍ~,g˧4}?9$v$.eo[>s1>9BJHx폰ʁ'87ΰײ8j<#d^ 0 Ԟa/ViP>zg@ ytNd Jz 2 {'gT q@*Su]7@ 8>Jq JDxYr#sʗϵWA{j#5 G')E-I@jX/"]]Ѓr+q*'!!.oy̐,Xg>"iw,HolL~Pv]\Ӫմ[)6U㾎nޛmjVl%4\`:Z/|@Ǹæ7.3^-<흋mp~1nMVϐ⎉Yw$te';(K͖yNnqw .l3A]Mg&m']e\lbb\OnO杺-LBDf׳n;gh@CE71PQ$ϣ71@#dI ZlL gYox;/HNf<+) D%¸ȭ>.HI^KcЫzc~7^t_ⰫƮq|Or p5CO)OIkǹȘIn%*S1A`X"$ΤDt)&Acs;U`[kz,%e:+o~uLNnl/d&nDHT'&#֣ >{ΥI6CC_6FcMܖZ`%/+lI{D.K%X E)5d&$p<'ǒ}P-6|O7c?;Y+=3|?/:kgx6j >W1* #g' 9RIhKNl*5"mX$QAe !ʫlD>p$ 2Gq* C~VYº#%fYz)P:cd=(&i=C6eq058a&ikFa-8 {P$EtvF&,BJ6ri<(oy;! Hui'/K cH2p 'b2\,!;V5}\E9QEj`.jz}UzVwgSFuK'.17d4 ޒ5Wqˉ;\*̸Y)#4\$Ey~uet#-{K7pydfLe9Rܒ1PIJ#%/xNݸ{i Crt֎3eIX!i!7ӄLQA={٪ImNLF~4Ï{<HY}AW'? i(fqV4\_Mf@ͫ-gW ]ym^nEκO^7^^(dwJw1f? {ugɝbo$m{GRN8;efyCp6o0fw|6=_Obdu{G-5!u}+Ano!Q% e@"6!_yT.<WqN MfQ;{së_P}_?qa^ћ/i #m }$\]IapcyCkQkho=jMͷ&|q =x6$nF|;췗dKFsY]/vM'E*p=Kʨ MuqIJH U$5bIMVDb'wMZ)g7n(DC"d]Vk&T,̹jdTF#B=Jay{)"0Ǡbk G(!y>ks5G'h-(AK;٫j<,r+tqE\.˥D0]"|^ɫ[$ n,co4r޳$gȣz5 ?Yv3 36D撵h <{o2d8CػrW}Ib< @A`^J˒F2@{GKnhKnFxdUbJdY*2=8fu\Bbxq Ez Hʅ_N"CH"CNR.}&gd%J9Xt@E9$ Fhgٞ>ۂPd*$&1&r>$7~|̀Ѵf ),c`e?kRlX.c$Xc9K hJI[5e@ ObN.H&i~!3ڻNo Zy;:wy܁\6݁.>(Ҡ~] kL nohGDYD-9h սS\sꅔ= *{Q׀Y"J6KgdpfgUI3&x$@B1eBu?\S*"uI˂nP 6nm*UT~vzUrFfɸBWHQĤ( ]&&;&ɤ`ú[\YKIq6}&d`rr#Ƞ#ʁNVgr<fyzf][J)Zr]Yg,H[wz|=;_*X%6kДJrF;CT';˱/rdDv`r0HvF%坴9R*D)Z^)@DxmJ#/5#&hu"GNHYT;s8?8Mmv %-kaCςZq4]ڎϏpnGQYm G> @^l^w=>w曥D(|NM3vL `jPvu3o{@6 `ͶCjPfwoz|p̞Gr??ow˧:c>8>=:^.ޢ cGtŚ/{Ժ ,4m^vv }ZS}rl3(Tuۼu650SH:80em2nV%Y2tqqF:^a[ 2AH惎dmT^( V$_7 f6>F:y>rl&Y'ڢ>C}`F9tJ`)*+9l/%ϬJx\^{b+N(xH/a_츃5`S7L W7@LWτWՌYrm}NaeLUBJ!\4 lRz!IǂF;2w=y_"^n]RAԛ2yx#)4ɞ9_]w-Gzlx/p&/mӥ2^pξnb >%FDE#oӿHik8` 17%ddqW(Ȥg9R^4w??3TΨtVL{ɍ\{!%Q` F&RSU c8 }4ĸYx"୍(bVzN#"W>Fzi5w-^_^e7$~ ɝ#[vn1w&c싡w4+4@JS5ߢsA m3kAga`ֺ K?=bVzC=*y1҃R.7wozӮ<7= ޤaoFʎv,Wi[-H}skkpu,@l4`RI⯵@[kzvH=B|s>ށ7S>x47o.eL`}}gp2-r)KtlfI-߂M %6pכ7=(WvLy:AI߁ i vl:ƨ&m,6_&亗(Hg7)`r\8M%+[e]PәBJatxdN:\rtN[&U Js9d_կ5fBSfr̍,'D %x.1 ;.+U#_td%L/;RI 4eRGF [Rnm{կE?dkQ{kfB92hVdҺ8ܛS2 JѐuP u[Ex D\ Mdk97P솥v͙m iKf`e%BoQ / _2+T].Kf8uU⭨BxuU4ݝ1\RWW+ uUEqPSW?츥dZI nl >\5FxinءF%BWm;u Op[?Rt.XY\#kt _4=&?H?3MFyH9Ź o0cWB3 -}Lݬ?hZvM0ѷo6k|Ża";.9Z0z`_D SSNK~b͟{~ZF,kZ]H*2:M 7Oͣ2fק0$/vv+X6 :6}YÒyp*emj4Lel C:&X)W.HވA/؝_x`~qyݫ]_=-Ak{O?}VE$kHR%]dbԁ\9A@jIEUc:(5"O!I[fL"YT,s%ztJ VNU5r\zAWkd~9 Оr\aӵ>JS;^||+)-w,D;*⪣c%.;!2c<䶗K1ZNrIn4#]B 71˃ר 3A!#c)ɰ2FV3Dzae'FNܠ4`$-* 3٤R> 欨,g9RZLϳo+F1*=~ Y:F0'6DƁĜi녎6`ߐ:A`%1W/? /zS̹IRGJA/ 2hrTIoDjUZa6NZ ˗Jͱ/Kyԁ$Sq9x 1F"*i⨢8iv7>QmLo-ȫF-kvvQ&qdG&}#ciyf179:%0 dE-32 \ d6mwԔ{Kj,\ ;B<`H06kn9U-!<%2Ȳ m@aX97;khRCՍ>HCvj^hU )g @\_$t iŸlbh[&˄%6M7BڌuCz%AA'A @FߋSp> &oj>E N`\Yˎs"h'XRCVJrB,M|kHzltHȓ&#(%@MB;B&0DQ[ekZF|~IfxO;e3mO XV..*B2rX(0Qx)~ !P1$5^_4ap7-(e^%L@ZKj-h8:Ƣ$5$E,QN?v=U["iݖNX7C<ȾMyv̜Ϫ, aLR<_natZ[1+q. 
IQb2[% :F0B:^@% R]Azֱ\IeAʣF RoW^󸀽jq?8w_ɚ+GSF_X)I"MǼy4*0CL{cI{jﺵy|!y᯽dMe-↌"*]՘]}J.•Nӭ|H ^mϖŒCc%fUYRMA"7~ +\vWf?F4qqM79}]HD32>\nF+dF`zjE\hh]1a$~G{ m!i=j91-Z͋~lu?nܺoa?V]s nVxㆫOخ7IM(vk`kuf/ĘVG Y,;h8;ٞMoAz뎱JV:-_RFʋ%Ͽ&ѻ)+k~{yjM=6T SE<˼pbٻ6$WbGՀq3;h0O !Kdk7XuDSI$Ճ%̌ʈ8৷?|?ǻ~z=x@XX pi*o>@C_ZZCx롥m5{[Y[;%Gq $_>0M?-bI/}>mOʡӗUXQ]\O$BblrĮV\JH2IjM% nɿ(W\񴼓#=+Q‰KX) g\Gd1 $4m2*CHFD[8mCo6O.1Ab.d)8í!y>+^< DP1i/><`gP>3x+]t }LJiy)𓙒F'̔ĵZ=d$wِ'3~p;&3}LSWhv1;ffP#f&s"sMP5(D0َGGމt˸WPg F92dg\IBQ9k$ θgP}]+,stJc}^1"[#3}f*_*U uBʙq}t15"aH K*Whs:5Z'jHJsbT0%(k@?pg(B0Omus Y抅Rnv99`n/5ew[Wva\r уBiZM1BG`y~5>P?ncF>,+NĔJ̀F[ ݐj,KجR (~D/uN; i::,U{0xΘ.g5IdƄ!345"4!$KmD}ZK>2H$ީK6ʨ8[iC~(u!2$0nD⠘Ic!FH3yNIw&j袶KۼԢpM.fVߊR"!!R0XYQE^),%~~1bɎNor9t9h{ɴ^{}̷;ejЛ.}Szj-s2p|LD!2L K L9#3>$m ukka8U+J( ӓ "79i\)-c5qJ5[Xmfj +gZb{ʽ- 2I؞4X4xh|hCʩ`@ :en#@'jspL+1`4_b66QCАp q;FtY4F2GĹb4]AfP`|:晉zk8h!X0J-W9ǵTxmRulo VA2$sd2c.(!'T6_;gx5q-/󣔋aE6?vP"hGx׌Jh):I'1Ms?& RH|.JCrѻ`Gw 4Ovw{~<*_JE#ik'*[ʨ-k/H#lN!IBfE؋E(sƣa #xFXAPQ/=+kG,.<o*mG P}B}(RBJx/ 5]h1/g,eplB9 ޗ qx;#ԒgeyV.cAzmc#xV=]M;kR8lU,'U?wkN|v2h[]ǥEazU 0їo^ nsi){|TU!wf]aF\F)7jqn#ál/w$ӶfU)DtfOf{'vo&16cm 7s꽅. ty1j/OcUwM~+`65t~*zx8Y\L#.,Udz8yj^~m߳ӫ7rd[s%Jޤx7l:׽զAKﰻ0YʚħT2l~q\u+ercPmbsi~HVj Mߞ2T 73Dz`liyk'ЭM_ǃ>?l^/_m6;3I?!ߕOu\o+u<5gӣkut wg:yI:2zS>ߝ'輳3E1 =ku'(bI)ocYibW\;Zn;u7Z.Q"Dqf׽ɭWNbћ1~z?Ӡ? rh;==1$Et 0F $7>hLRQ1 X(T=q]]t߿?}荧IsIoƔĪ cnA EHIs 4*'#B]gRRCu@_І{U}uLAS-#w}* *p<̃- [yrq6m(ᆳ ,{`wS3FYKJk* 2t>L3W9}NdRɊ_gtt>io4.H=%I,h%U>'+jlQ#ӧmH0LU/EHw%,hYu} CW.uTYrΔ&(jBJG#2p.c+`P׃B}\$zk u)U) EMqLڹ쫉.M2jp4ɒ%t09mbd9YR%Y>ss&&h} lf(}ُCԚ0lV13ʐrd SN-G,DZ щ aNlJPQ24dJRˠ$,:6 XF/:{~чAA#^n5x!C݉Ƃyd6m; ہ hɲIֹmGZnΆN+quF*e66s孊>9n!8mٜ9c'Ou/=$P*Q@Ͷ=+aW`; ɷsC\_`fP1HGQcI'rmP|qu:;εs,ȨX0d-Y/BE-=)FV{5q[q3l>dE3{^)Vu0y,YJc,/eC)( 4.6J}Xbƫ 6L5'}4(*H,o!^imwV=hMʚ 6 [) Rh' )`SJ 1drgl{GL[gH`r60rHcr-ܛ9nl8dt7xykݣ{f~ѓ;ݚwN‡)]b2DSE̪o+p%@ַRJQhK>Rj+OTRe<>)U]'ACZ95 _߮m7>h9qsIL}>Mc39=g@|L'3:_鉐.Adrt; {y44=:]qK,_y]~1Fտ^h9ܵy=m㵮K( hmViռWkz.33_Su? -fKlҜ\-sIsjӜJ'JLs 5}(U0O33Ħ UhѕTBQpD[bU@P 4`e0U0벿)[˽4hG6dm–!{{*EWoj`v(mTɣ12#Px!ZrVC . a$ssmgRsj2ⲧ -\ZeRfCjcZlRRSVaЍGt l^n-5ԭ޻w{ )~'߱ifX7?\&9 =Xc# Fgnv"ϳӮ3Uxm9]D?O|Z@ur>m֮>:?mW~vI翪R9jk}!}2z5̩H?sq?ӡ@-|M_}֌:XzmkIŝJoOӟ7mT[Z*5Za{]F^ vAr\.^z^xetٌpEȭW(PjQ|Qpu 0Oǻ\pEjU*,:@\)+ 5 XcD,!J+Af+W$ X-~U]" CF"]\sWrl{ v+9}F|"\|fYcTW+*#\]DWCb6%YBViDĬƮXg!ɕ X\.Wa0Z 3A/Fۻ'WyfZ'犯J?v+,zn}nFJc%?Qv3JvKDke/i\Y޿4+lޮڥ4/MQmm38}ٗ >%lӬ|NgMjWV:y+|놼aQ4VTRXMCj+NTR,2xQlFf$ׁ%6cJ=6cFc3d+,W,7$Vkq*=\ :p%*oUn}q*/:@\)# V.)?m.YOs5QWabKR:c_t#7Ow?MO~x2/ Zr dsE/O?n|-d~ų_n Os6M #h~\&6tD"rKs8#׿ͿvY@ڝov|͢$/|__4t..=cgUꮀ>pҐCמeߥ POGsrgG~.rݾjCi#l&4ϻsB U?qXSj֏U#JW]5M)3 D$3j-W\"BیpEAlprWv_azD4W+@N"XW$ךlARWrepp>`V X7[W(:@\yiU v:5,`.bnOޕҗ5Wv<Nᅱ^?{{bϋE1n\UʑyWdճ-lep>rR 啑k/hDc!ZzrÅS]Üe]67_ fe`)2X.\ V+q^\.X2d`5#\`!~rWTڱ#/ 9(@!XWUCĕNe+;G0X.\pjJ W>#\  W$W Wv_i,3d+ ^pł\ X-q*ej?D\9!s Y3B>E5v\ʱ%* \Z;v\ "\ ^ _,{W={W=fU\v= Dc-?[Htb z 6aZ Yc4LӒ.g6z ޮgdVׂW9|Y5֠1Jadtr./znhA)Čbd61 *ʱ|e'1V3re6bڌWĕi'm.bjb+W+M)$FlprWJ5v\J]CĕQGP`u HbXpurprXWmZ rCĕJpEAlpru6YUqE~rj':\\WV>dxWWzîG.2?~qK.&2U?jO\< \Wz@/Wb1m:Y{1xs@ khD\{2ahɳ@Ky,Xl L.^50v/UZQ2p>#\`T XZ9v\J Wz)r 2\\ruW+ XǻRNpS&bƎ+V W-TFb;9XcTeqe2 vzW։{Wq@ˬ9lAVkF+Vwu@ӜƮH0|prM6%Yceٰ͑@7 =8{)S?ndiL\v=pFUXlpr +Vkq*=\"zdfp98ʺ(}ڍnZF֮gh1Z ,i2iLb5-.ѯ;W2 yͩuf$='o?}ҩ`l&Ahi(k Xf:7v\ʱM\mWwf)O*$a>H̏y;zrz-&=6PӺ"ꏋ'??0 ?ޟ6gd[b~/s]GBg1t?uH݋x:B>+\d}ͧ$z霛vF;Pܻxf]~{-n |;j ɼ5L񺧺x੺|NJe73]#BQ@r s˲>g5g\K:Otgt'5tS?_><%s3rI US˻DEu167k] m8LH@mtk\SC^r']l! 
5%>l>\IÌϿ[SQ3/ij]U`CkWk+ZbNM G izH|@BM~OHmQc:Il%@V$~1u4Jj~iU4uJ6Hd{A >YKb]oVDjRMm.UN!ڦ MCN7M^1jd 1xF4kҥu{r}~vyJ.:[j[Qj J?{֭]O-Zy| (E&&(Mi\@{%yFsQfƎGE&Z䚦ܔ)!'ZkG,zd4vcF4cv>alla{-`tUsYP};"Z#)yタnuu hQJݣll_g)K"LEt(.cPa2>|c,0*gs=ΩL㞽c7*>#FSHQB>PF\RY_˿͛[V1ڰ^TU2|,fg$usY:V:yS3փsuN5κbU)=\Od4>]wF8g=][unrlq9o3d͘*OM΄|aT @PU-KB$X뚯͸TI'/eT 9jQ]BNF{6Z!ss%E: V`fYt7h!Q!sӐ=ԆR 6*˂ghZ%;6:P)K(<4&Wn!x<w#wS[K1t,ruϚ`&䎊\0qh(0ࠤ`g"U2Tp)9oL6u_@*A2YR`hq(Hs4eIP5@,3TB=@w/:=V  i0mՕ/h!8Yj Q1qyl"J%4Rh ')C@BY3' Z.kOiІM2X[ D:3acrnz RUƊYdb(QLŃ!BMHȿ2mL<].Dz29۳5+WF#󃳝 mzLZ$L>eQQ XYǪd6Q t}1[4CU&@1 $h6\މO.OrSمXXǞy$X17Xk3xhD4hcc.u%{6^̪s  ѨT䡻%PB*c(F^lwaL(۽,=kpS@a>[]2nl- D qusF5YYn) l\2ݶ@(vV\,fw50è63yhY=p4vf cT2'Y i#j8؆RjW$[2xx Bi4 65fsͅ-jU].i@D!#Fw,*Z nAK$)`*gd Kwm4];T|J[ T DŽVً[Vn+voXVehKBYImԫ Ņ{rO?tӡתZ} F2.ej//_|\bŎR6-VW &mcGjbwK/?x,j؜m޿LȸFq8]|ZopW͛ɉ6?44kV|z\is>vynpxT:=|hus{rބ{7߷m>?=nba-=cv4# ~ Wu.:wٜ@8>y@~nB@'%M'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qy@ d0G\:ha` @AV'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N q=_'P rG@(>p='ВzN08(z(3qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8J19p)hlnO Q@ D H@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 |@nΟqxj:zno 7V׻} ^]]loV$eȸ4;{4ƥqiMO'R[K;N:rdg9:},j+tZKzkxD QGS\\ z(r U71=r5P$hULx֮\s4jO\ NIz*Q;r5z:r5І'_\iUԇDM_akahSa([ (WAգ>?Vcꍣo՛?cKߌF_wXB\Yo^>:Gիvy2p<>rݬޤ2n'q*^oo9Sɔfwm2>?삺:[zi8gU 9yc9:?!hp1ɌtULU%x*)uG< P>:\C I=CbFR&(CɥTc[7eqVg$ǞP]6^mFԫz7?K0)/ ߽߼{3gW޾衟{kO>g A[:c e/',̾:+n2K3 fM,im .kL56+1cU{.sꋭ'FkS[}Ԉ}Ga`q[0Cl8sEGqp^ǹޓT5Yypϸ3{˒yp"30ɛ˳{fxXG{u6vۯ6_`\N֍/f}GC'ya'bWzv޽hIeZmy7[N*An5VTq9)*8{+qxK)c4W3?Ws;g r)z׳11!'4V ߢ+D %1lա;8 yƋ9]|{p{b ^(w쩲Rj{)(R{RmMڽ3EݟqZ>p8nDzsMoJl|[114p9;e:_ԲzwݐSFH^]?{WH/3 L„7bftRƜ55z~[or駹=ruN!o*?7*'(U!7k ɢLO\n^,|iAlh:(u:ҔćI[0Մd}JRRΊw;؀eޫyi+oL:g&kfVJnT`ѪDV焲FjysW qXXo~)Vu.; iܰ⛦8y؆\uj˃cgy!.iODKDvI>I>udE2s4xE;E!^ KV5>ըP^Tm:JC2>6.i› QxSlV[Yg6eU79i!A>NJ뇐\ F8|sJmRv1}6I\9S;Ӆt!""OY1h2]3/^:B0F^"@OI,hr2`!8 љeF; +#j1Dd&Bxpzi "t=?wz4}Үm'ujQ$O/ ෲ`x9C@9aܸ5`SoO* UJwIwpӟkrkx)+%GлR pM<[vu?`_ =ڛ6+EگW0ߗ[ʴ˶x8Cp.puNKCnt_d!޿Uct6te|b:D*r) sH@F].t^rr>p ڿδ}TRa$H^Q2a+^M2]'^K'-p~,?}ŠPJr [w"@hmLI `SNI`;j)Ric*;Goo$|5z_m`' +-{KYo;M"WoWBĖ2/qی~$Alst!AdOE6*3}\t_n{갣MMcSmY"$t"0F dU0b*@23C 6'U }UC$r誟FvZy~>50eo#-J+_i6 x! +z=/wTeGhRIG"l:23/<[p%fP%h>!JVj U|31)fCEGŽDF܀ Xrgȴ1`fVϲa'BNBIakJoM8&u"+ fZ$w2UΎ|Cb3vN*=~^څ '.\Dsf\]$L8 Q*aZPsݱ@]V.w/L!yMœ{/HƂuTYEe&z >I`e(ԪtIdcƫLd9I:zN!@es)J12 z*"'vbcc׺(g[|l] zp/8bFmߓ xcg}$e`Y s84"ҧʶzej}0ɡ0_5y,'̷{8ɀ`l6j )s,=8ŬPH#. $I%(,B_9{,ز!pG5 y,w!F#դ6͡&MdY0;p)e93F& w.oXMCz%M&1@Ÿ.u=ggp7yUU8D.O.uٗ "(,iEZ#-$wMl*ʐC46HFRP4gt 6:59t~Hf^N7y2e퓵QIQ6DJN"O0 1'HB:}0#PoGat(#+ D![oH%H-UPܗcKP~IY&Ʌc[̺g%fKKx N̼7$ϐuy' 0fE?ԑː0v\G.?(J0_SiZ fY46ri<:2ңl eҀdq zA0cE4+9$zۧPjg<%+ᎌ֌ٌga{Jd[iotN9]&$ ԋҨ0dco f7M.')a;Mknzeؿ&%I4qK@1N.ј-%O&" >M$/5gfQV.лc%tyشaNr6OڐI4s/w#"0]7 wooj?jFH죸S+rIvĴBc.ӟWtV.OnTK9`ni޺7k^̌ckuWӳVd.0gfoW EDj_mwg~A~mIٍ#Z?Ŧaa8*Fi8je`rO܌ٿO,N8`G],rӨJ"Y9uRjӂGXʽb?G7ޖcNIoTM1?=/㷓ӏ߾?ϿtpaO~NLS,97Mae7;_6 J[ --20Rx%om~\-ICKX5h{S.nNYUT{ MbVw?y~gVB\*^`+B,aMvmܗ\[[udrE!18FѺ*It%!sHҌtnQ)JmqXȼ Y>0w@)1Ȝ?g $RyKl)(+S.vrsP>h?z+Yt\Z:a1{+fAa2E֚3UQk t+ל>]''&q D暠kP"D$e|}6-VuIhOr6FI ҖiO28L9sEZ(ʔ4sW4lm-ʇmNLz1>>*(7L5 JR(OȲR/ \gh2Ykc!#6LIMa I斄>*PWEƽƦ}Ԕϝۅc]qIP( y|!) 
+Zt[[Sa.y574lw׮GgMϵU|rяߙ_Odz ߿iȹq2>ta>0xN P jA;nro'g Q-#,>]lXUذfSdVżR9g@%Nh"*MBtģ7ߨHGd$06[*|" aܻH()Ed%\8U *FiF,189uPHYBCݨDy "líNhbpHd5Pʚ t+ȥ & @b *wӔLAk  T:,Zew,_g@2>9.h-D<NŸZg8"8'j18[fT5E+}7QE+&';:±vޢϧszW5Rkߨer^pk1-E%Q$,ϻ ƴLDи 34TtQ](Ԍ8siQYٸR+4s$U 'z5*L8l :%*I% I|if,yf,b+Yυܶu֌[/ kǘng~>wBeGh8匭 )Ѐ` b:΀aQ%*|2ylǙYM.$LP m;26ҥOfKydḶ`KIDZ6Y`7gINb\[(Hi̠fRSQ+WJIHeA`zp5C!i'Q Q{c̑(jNIuTtbpڨ_Ux,XL>veD(̈3bψ$> &rt XN5H}T Z Hk &L6:(@8 $( X<*hI3Pܪyyo.#C^U:bR+/¼(z^yqsPBC}@Z)"cZJn&ϐ%w=/IǮ|( ܍@a+䑷yX[E>c{f/(j-zgWr*Vqi\9sdh~)YGKYiu2J3#+Mp1\ߨ%YhsIy:< *FOIDXQ R2$fJybtB%O80s=0"AL9{6n80(*%U1y#ҪyF?gZ#̾rq)F:B$@NsR$%A<@8)g2_^hy6)@IZ*ƀ֎"N +c5 *duQ!G{ zDq9OvID8(u< c)><%6X*g}YɅfדj2g?J(YzԋҖ=+BR XP&C4NH`Xބ2YZ(I%/40;⸕+o;.i~]Ӏ,E\J2\]-u6ED߽_|CftqAsŏrĺbrd7o~q~O`@"k/əȏM_Fâ`24ܖ#`rR UƬ0\qhRCA1W]l Xn<0Wwtwߌ mTtȵnBMX=}O=Wf\di ,{/l㛖9>J:Y-g3<ۿy~lo.,fy^m/Vw(l4#yYGґPN _MRyf1X:.WǠk/ǾE~ 㷭^NS;dro)jM˱p M㡺=L| 3CEsIw]ӱx39\Kp5~jix`HEjj=/\X= &oļb7q=^4֏xhY33{ͮ{7M-W4l sRUjvgTzODJ1`;孥Q%Z4))I߃)>;2! :8+p$$L&<>:IǜS){/ya=!,>]TGxlzI ޲zsU/M?0I]׹+ @%ʀt&UMW[{b>Ll%[s ؈Dv2ZztQ2+ KU6 BBW-NW%ۣ+%V])A U+:c f^ +DII-ҕFJv2\*Rtet2] Q]/UFEOW_ ]-^##8|`^^׼2:t2ȴ+=]:S.V5<_#ӈrU;/߯9nNgc8gۯ7N拪{s|GTVg wiTp=͚uQPR`fM>s)EW6I@ZXh.P፧:Cl--Xi#1H Ǽ*g}5/S?-@2vZ{G5\//'.|)&헎4's՝xŊobu_XSbVXz խ_BfGul1q]6 2#TrMĴ/ /KBH6cNQYzZ\ Z8+Dyw6 WeЗ`s*E IC21)#knu {Vr4t\\o3mal]f٠&+0R&rP𓩝O?vURWB\S FYReV,'kwLvy^{nZ(S$ 9U QJp>!:-S nsZ5A6DB-M!ygLD_r="ث>H" 3rz^^O`*V 'RVDZ Wsp7?{VŜ6/E@czg16oi4x)&F-N;[a3?moI}P7m s/1EY.>ynzk5K4Fe5x! kh qWOhUx6,z!J;zR~L?P)*4cvuP]*tV y3bpⲔT2YO` &ؤ#sdnIm2lVheMP&6Q*dH{1qZwre c㛮>㔡vOe;p~hGMGm)R% P٥.E;*D*%BuAy(}%OMv `\ѕXKg o #rl^^h|Q rt瑠!|-tC` CZ*)YTR-wu݌5LbJlE6B mI FB [Ö(YWg>7!TBxZW.|H>Bm#yzEO:ϧ;lUh3s'LD.#S,LFY't':I|ut%):LEX t$ Y) !E`ٱrcoC͐}J4D0zS]&Pp#0P(Ŕd)5SF[6"_΂mC Qe (>)NF2d"$UqL¥օZ[QOǴ45-l-JZxNvYv<_ѫV8\U%bVRZ/˰b )LDE`:%A4EFMp&"rR1Pr1a*>(R e+R`U526g72*Ͱ8 uc,T $xf(~ pq1xq>ۡ ,rk4 PJIh3I=:@q#By\Qx5_ b-dT>caNx[FB8;Ny<jv jmG*y&C LB@4 PB/-B1. 
7EgXh )k12d*bZe82F̜ɂEwxLxK_ ǡDqی'jN]B{-^X)[ZAH!9l|SD mLŢA31,RbKZzTLb4gwį:2.ΖOi,9Mc\4.5Wq١6"=)Цx^dIQ"px*xlv1ְī8os_сO (I[P:zJionT%ۯ_x7 o~Of\sdz$ ӄzJOW3IR(.s =MJtzKff]Fcug=x Ν?=|?/| 4* 1F =$uhpsg>we vWhw\ߏ~Čs,_hx6h3z= F58am݈gvr=F3.OG o<{} xR]l\}ɿ7)$k;KsJC~oGjV*~qgY>nc1hmF"$ԥT8)r"g\)œA;tIxW؊W% mFd*l-h@!Tt1S(:נl *4IM…PoL=}ҧ1?nE锼tZyܞb^m|n{glJ<=Ok{/kmO1?q3g9oŀ0ҁTEP%}ʞA#;"Yu./ 00(s]ʔEIlA&[R!A K`k5I59B2̘ɧw{TݭnVfj) 'UثȘmz+JC`|l  :+L @gkg2zzڐǔa8HR ufk'%-%aP4_^G "Vyi@&SzHu޿}T`π셑k}Zdo1 rQDkFLQS+=,$>i֟d ǓQ^YR :/hA*%֖y- 7B6K-@*]n6I+qJ8!fmGB_7;zAP{0~'YK &Չ6J _ՖC'f"`]!{{҂|J_Gߤ6u˕aƮcmzuQWh8H;hv;N|cw5 vN2u~ o͟껼Fח`ȶ\6닫M7{E]0z3 TzP8!{0z..pQȹҎ9Ϛ7n^'+63խKAƫd| : b)m<0^oz=nqcj,#D(]U9ޓ.'fjE# ]+mE{G|]g&識2O7BEraϝ+ftž/rjy羀p8pJb')S¾])1 gLY@)փlSN|}8sک1fKFJY@SFR1.Igt/!QWT= ^ftu}y*)͍CS2Iݖr\弰l6O͏z^*zhMro*!Guwx>6:rUx TYlŁI1M,P)ɨà4315ADTT2V$sNHddDN8A^eDQk\m J˰BAwBP^9BB۔T1+^7ֳfg^}:%5;нjegХJT>e!JK_=j>@ONcҐ*M&%mUk _xG \]oGW\Fw Y98~!",Z#߯z%k$[5,3_eXA*d2Enm1 Lk{S3S+Nz?»ii,H^Gr $Q<8(LsTetW(JK'LKR67WY!Cf’d1`-&ŜYa`|`<Qx Uu!ڮ..4mXd] {jpI2c!r UH.yv+'H,2C/݉ogtC< ո,џߤG 8<1.mMJi 2OHM(DJ dyaǎPHIɚ^ KJO@錾Ϛ^R^[fXj,m,q"pe#>{Sg 𬵂c9?-`%/:i2ʐ$Ԡ2xmBN9PdX꺼|gxcjvCeZn_eڨIrl, iXrJp䫼_7F ">zvoj^g+62˔2c\dO#7 h3' 1׋iDL*_ };x ((?hà kdְr{?5yXyk3mu]⿯h8%/oQKѧf!Q6tvckb0 ޜNV]  *|{#X[Kʶ$-5ۛqefyEp8jtݧ|m{:%ZkuɶV[J*Y9hu\\Fxpb]|L+W%"~PooKIRMiWoZ񮚦M +WiZwـЮf]vobmmHWq,?ηAݟtjW<(FWQTWi9[.;>qOp U\\PSB,!sيhzP{排Kd1NeU-ѧ$`1d%UFgQZDetɨQ£7ԧs 2䓥 X+ |$4omFX~c'uwv>_VKvU49L9;aO ) UVhR}qvf0 RB\&o@(P#\\Tpu抇(y0h{;ҷVi#'eQztN+c`v:],TNN:,''H[HRz(B)]$9")YY)%HuS}TX$^ݞOZuu-*)PՄ+K{Z6&~U6 r軷÷aMSh+q * ~0un̟\jtm_kC ӟrq qKUZ,[{cҭz9g>ORzG "dԉ~x]] 8gscΒ?k;wۃ-"q[1ϘL@UVh2Ƥ Uin"Q~gtY.1v`axq¸ ,h-SԊ' 1q89􋦶3Q0eEA[2/p ԁQX\X*+N2U8y?I»; K XpF #:sIs H#XHy=b!⪚,ۻV1yeHLf\0HYnJ jJ2Sl~I*זmM~2FQsNQ쑋 `uʡ*IeCjjD Jg%Nӝz dYKYikk{w0>Q8|ѥR)+g89)y21u)Tfbg{ /5?/t43.?nF_F/㷿b)^/6oNnFX a䢡L3AVTx#g͂xE?MlpIvX &)](K$7%GoڟnuNy1,^ -Íni~VL/5>jڪgw~mv` 'j~nw&WTR/?VhWU&Wѳ8t_7Vi5^zq9z7(GhfS/t^xJ<0n]Oװvw{Xj2y-^wE+ lg|3ߢ W|+jCHaVR*[}r;`epJg1I&]AI$8¥݌OtVx;W3+UFX}p ,Kd22eT1<0d >@ 2eۙTZhEǯaO+(LP , RJ P̶ ZgsV p; zw\3Pض+R2֫F]3K$u2rۭBfꭌcep'5RW`wF]rב"jVBdzJ8!uU6rgU!]QWضB%^]=Cu%Tz?5IyUr\ؒn6,ӣk΂65z7rB&b]@}ԺBZꌵN] seB:(ߧ6bJga2@(,g(nM@#V^Ax8tU˭Ӆ [|FydH Db+] 7;SPB&(Wb1Sv\|:s=Z4Wo~*E/Gr`\<4Ӫ%YGpA-`,t3\(\2Md-5y rK`AxmcqƷ|]7cDq:Ͻ/f x}jVy|ǧE.QgDIv}F|PmU?֑yhu%܄V;dߐ3DfۍB嶝/b+tD[la/k̒~9IQ.Rh91`- ekJO~RmIQ)Z#NEK'EOB4K tҊ U0IEŤͨ_IQ)?)zvМ5ru;yYn.Vw~w_(foJk;12 91E] LLt*`d:$I63)#ݓJp<' `() yB9 m\cɦ$Uz/u${Ӣϡs7}j~K'Q X>ՕJB*BJ'sR,"[;y E2a w'Glk/dK5̢Oޤ@3#27h2S1WCO5FJE-u"Y,8熣t%?v V9۟Enخl֗A&[Jr7"{mݳAh,VgSy6ĝ<1gΐt"T Y}e$w$1f%BJ1Dsb#0R[fyZ2trž &{n\K&J~.|NRZP/8A 9r* (^ Vr)SdBIgaˮ gE-J:\@Z)ERr5g r5 ȝtNjNjwI ~ Ņi%ٗg_*)RnZeW Iژ~ͫMßsX+` ӓU &[.i>6u%oWYe>1q|C̹4xtz֎-7cQV2O@^>3bpǡzꉫ_P7d6nդ(4i%T~>Gٚ J^u]^t62:>?OMy #~p1i/ƱQAFFU[\_~ۇ7z&~{͇Lz}+Xu#0 !Fs[~{ _yל뻦mҵ݀oЯ@9_ywk߷7X[eU.]AӟttjU(\k#&VI7 %1wn(i ;)Җ+ gg"g#Ɯ %Hq) Ͻc^Y>0NhCt9KcHdjbbKp4`T-?yNgox㝯_K'ϥzއ0q|OA}[#Y޲(=_gqy;i4]}tIUyFPHx3jS2#`>yŒ1ũNAʣT{nrKT3Ww-Mcߠ ErAP3+X|hT4 r}ݥ~h۹{\U:M} =nSnH^!4Z5uxM ]>D}}jhdVy7:ゥ4ĖU4;{mU/ y؈ Ip3?U'u؈d8-.56h=>q7|w.$١8d'"t"{dӴ$T`DRx6Ey/˜ %&, JyJ9fGs+ydXDboVJHH!~g#-A@B6=Lt>wpnmgO1[}cb~!vƳ1J[njKW:+.EtAJŒ)K|,UH7fu~Z9S֩vBJGg`D(t I@Pgq`lz-0!D!ee8,6jX$e4zllMVll`wI "5zna aIҋff[ͦ׍瀛"@|OΚtdOꞵ2o/ZeEG9!JBpuĂ+X V2XmIcPY.{aI@BI0(9UU#'8(LQ[M*Ffd j0 Ș1 qƶX9t%{,|V,\Pm6O@ᶰ9i~؏l\~[4j`E5(t0Zi2Q0$cx(H>% 6N0+D{C`.": 'U\r)rǐv#М"HWAAx06F}8 b6YfDd{D#:*z Èj }煐Z1/hr<ϛTt4"Ɓ3uS/ gN X҄Ijd c]qyL\Nvi3l<3.=.qq}=啤3 R@y߅hETd Jxg-EJ+x;Cvx Z8JT_׳pެcձuh7p{?~GH I̓*@JŽ742&lf?=E!Uy=|b_'',%zvO@? 
4r뻟DBLmiT&arP&R/ FEϾGዿvNEsHjphi VIhWpvaiy1,oBZXl潎W/%h?J~mhqͧU݂tP~.TۀEP@ݻ!AX\O(T|'L=a$}eU.|lfI]]?3hqBRU2C [O}SKг|W z ZU& O^ 1*8gX]P\ui%i'!|T9F$㑅ҽ`"RSFD0+CʘtMll>ƕ~n]BFvjNjY_o}tiVp 7ʪ_˶rg> M|0ƱbD%IH*x,NGdUcS -P4spM!CP{uEˆYh-1L3W>g<5؉J2j0L=X;(FG'MJ FSi`V&y.y OR׍'?rtA #W/RY ȉ i1rfi\gL.3Bmb@f7?r(IzG)E-}@Dfҗrx&'c}009DBڂUDi T n@k`=bbb.5 IQ=?ƷEI,Dk1vQFAᓒ mqkZ8q=iƞ ,f6Y.ŇMq9{PcjdōVy:9l3i}Mz'd)D RP"7'MxxB wN19t bX.4Z/h83bP5^sn[ZAtK3h^Y ƻޤFWA:.~KO? CMJhriKr8VuC  v9JN?'aqg.+=lj`LW@te}7sJ!̚/:O҇hPFP;fu7?~iӥ9 6UVaf>.Yú譴?rlL+6h$P-)ÃYM<5ix4-WLW0+?yM*Zb8t:;ls{'pAHzhq>nNkj!nʋB)B0ʫ1=YfbC/H;-eC{@ 9ө40uNVƢxb4ŋު$314B .q7ٷ$<_ss2)'zhYdȜVz]/HiwI}9 &98Y FΌabm'58qRKY0 K͛Ou7O,=z{xZ }Dl@C6ru{/ jkI W5uKnUmޮ|o3tt,u6ILRj;0@]kƠLx|-6E5]M杼{Kmۻu][o+B^7-`fp v< xM|II&YVKNlGͦMU.țƘ#b.z!`1H1=dSZB1 Kfs10O<:4ϰ3>tVR9Y+dD%q>\14x54*/^WG`,oX Oq_Hp 2*=t#ϙxH?B\6X_ي塄f%8&x{5Ǯ+ /NI>4:ᆓ^&9gBTUs56DR.@^V!"JG]aB0#/.U2zˈI{q "ED$1Tm&)x.?,jZY4@exWI."4uٌab_Q][rB@S++#}&%Ca|VS>j!k4tb%c[d/9aA_ݺއA:w9 ܁M0/ʙ,Y΅l 8~08[1di[} [}r;`18mٜs m| {۰?)Ȳ3]Q]CȻЍ۞C[ _/~䎌 5ƜL\*:(7Cd'8hǶT Th<,HXJ ![SBE-=My9:-}s;koשs }+;M(i:Ըݹx')LiUL+ɧy:YRb~!ͧ%wZ}nziT{p> ܸm?6BaTrݵ_[έU@[[ b;"3!-YZu1[`K3 V2c$`%x`d :nXXOOќOWӦ( =d󜲋 CL*I)3-T$9xQ*T]#Q6_aᲴ>LΧZj٤ )=avSR™Mw/#91Oܹ"\ԿfcEAZ3=C-sgfY+q9c9M!-OE7$`4^t`b yMZ uiM>g !s 2CpA3dINw; B7F92xsCt9eq4g/껣PχފHhp7Q#kG'zRWgE2{~>ٲgedl%^V鞪tl+:̋WKx x׼4[n{ηw]Uׅ[0[k*<f]?4毦9W~>|:`[s q34_=nciO> 1|1gWڡݲFLc;Jϯ,&{)iU5WX=ёg8'2qF2qF0!T%B*1< Dvz4(KB2TxX6@@Ef -yzT)DnG<'-Kc#8G t|?[ܴ~oڊ ޗ a)l-`SoUL6Jj/i_~QJIeW!ĂO WMS"Z,feq Hă)gNZTBEI+pʨ릴8[<<tzhѣ spP֞&ɢGWn4JyRyҒY$f%'=2#0sYƔ';)Wf8{]Af ( iqh!]Y&*ؤ|CCHSBhT2^lmAՖfDus9ddpuHE&AڨIց 24 dAk2"W8YN1YOAΖ(e⨽&$ Cdhdp-wBH_FU/#ua-3ghn6@ghdoy&Hm.;cf9ewJ=O*lu}E+o[-_?zt ŻH->ĵ_~~ΞV|+~;/i^ti.I-#a<l|?_zz{wuхn/.aȾӉt^]v\Zky_{Y];kv=cNVvtZߝ̺|X)~e:ak&ބƞׇ|~h|&u6Hzy>|w=(+؃r4jDȖe k+" F#;1gh*;^Rσq54ujOACFƑ Qq623cfܱ,@AΫ˟IJt6\[KL>eX1WșsA@ޔVRE8tY C 6J,W cJhr ReG+]bPZy '`@aNQ|lML^m҉rL? ~dsRGK\}v coQq$Gᄄ&*%qƐ,hmL`u5撳HOR #ZryM&oޞI~5ݾv£2x,(s7+kdvF>*I8;\OH]'"R*PW5t}-'de={w2k:M5?N[N>I'P)Aq3)e`fΧư/) D󠞕[س٣H`7yť|lѷ/0eƚ6zlqCR\ +B H1񼑈6Z.)hK0A* $1EbFR\Vd`88ǓZn>Cٯ:6u G&iM8n '暧O˙H\0aL˜ i9כ`DFi=ھ]a0ðx6^^ΟvDZgBZF"b#l%'^ uA,QE#pXD~ц$]3q;nk!9O;lA#svp:oւ'ohl*\݋}}d&W0YpQ zf˜ouEVDa <̽vN] (!DBt!- lXnVfQĎ:xU#TkZ{}ZAB .dHч1(<%g Ȓw*hDk>fOr,; .K80|u{Wu>q 7a;;^,{V="~=Pu _lIom+-^5}|BH bJ3*ݵf9]tSf?tfW!dTjޯj.|>J=OqcմW:n聊p[(nzO\7]؜";xd2J ^S?p {86\%TW]Dlmvqap%#(dq4Odt98#d taCpԒLs!ԍ z^`";=[%i!RYwmIgԏs6AbBwWL#|>DɢDS#GNTU6 %MIL(MYz Ĵa ! F BKFzgl#!Ţ qU*5Izcዣ7L]LE8}ppuK7]v>(#tx댏14(]HMՠXJdD^S+K"4c||&yt6 .mo< S*nZ T l OZ Skjʵ}oZR:u.Ӆ`rD3z^J! \Z03$$ z}%ǼD߽%۟h$ꝉxjOn0\Q,HmJ&IzDe֒t )_{iR)nwx謍W9R588ɇ )ֆ,21.O,B;|cMhoWXR2 /.< ۅ]? aqS$&5QRe (ڕRu6dɢF*M$ թ0^b M:G!$aAC amEʢM&;Lw9_* nj]^XIX (c*"d]Y˄* REtjjG:0)`ζ ۣy&~y@Wp<5RŔ4zʚBPm"aΤ1%B68c_':66jt.nq{ຒJ 66]r9?89kK3mtscyPnrj7n,g765㜗hmZmAB}[ 5^᳾2@FO2ㆥTJR=ڼ{JQ1Yg7^XE$bcP`XW# 9X (+W| K06+*T<KM$m3"8*%Ul85Q"$i0kU"Ș.9X'N'k2~}Sqªv-X)+8.V0K$QAu` uGS!R Vkg @v" YK %{ҹ#7V %:oj7$e+ܜAn>|!t`( ƐW7 ą T+H,…:MY T E[xBQoE\FI6]$d㓖6bѣq!d ,!ٜ&ʪCΠVz;H"?[ZpYmu 'S Qj-4fhbTV**yb8:QSK~g@,W.kk_T.jAIU52ŌŅQg<cWѷ]{}A}pnx-0e!49x(F6Դ!6rJD t"e)(EHTtܖϐ_u>0PIN.Xg,).W+KP|;6FY"(VE`ޞ}<+O4oWO^XlÂ{Hr s}bu! @, qؿ/oъUMVK Sh^d^(D/)eԭ -P'D$_\F]5^[MJȘd*f4>1[{?o:iZsBKpsWr [s5zDi686hBbM2y^Taz3ꃙ=zey=?l3uב\1&kZl TW:8j-u>)ѧ錕B8魠Y%NT G"J``E.~ 6%q||TƳJjvvL8ӇSw> bH3b~&zpvrv~Q; ;Q3VWhZGV%&GPpoiq`~yu3򟭻Wgo^ωq bNḌosܮ9?/jrG4~Bl#~Mèqufy͂4>mu`źG=sx2pQyCnuóJ6FּZZH0tϧʠzo/MYpS<͆AevǗ?~?|}$?z/^W (ךěHX]In;6y6C.z1[kEaܟn|֠ o?~? 
fAmvn:]l+z\~f1?7DNyq^JiV+ *3 l Xwd+W]FKou57n(d0&Hbv}HƛZEF[ofHٔЙPzhRaꍺ3hoy$*9ANP'I\;hdmR|b%z~~c.;{O|%1l|.F.]GaҭޚYrhƢ5g&BoBsqu 7Q6$4EhI)ʨA%Gu4XOUXELM=MF¢F$XK'P)[`4M6rW>lsAkt~1qV@f=\:/mӶ^z$۝saBnT<਌RWX8.X y=0|a;;:j@ܯj)Q um2Xˋ԰Pъ,mTЭKP#&f"($W H[X`bdRr_9.]]Kz@ݷ{ÎM{)`}W]?<^/|U>/<C>3N>݉ _NYLZE܍\ep~釻Q;?jQ9ӌ墥 ͔Q2+V59+ZD=n@xu~EV$tqeڹ|WO.K䵺a./Z`~3VrZ̍~|2Εs`A]|ՠ< <<07>OV_rR->h"Ι#~Sd#3ޗ0XOs*LndGF #Pr(.a5*BHd$ІRBMvDȚ3- dS *lW.(#t &Z c#3rQevC}[1wT2%J0arY"X2eRhB OZoAnYm]jU|e (!)^Fߐ2$Wr & jglQW츚Q̈́h6p'Xg|cЛ.l NUͪ쨫<9kwt~~4T+n(LeBx^Vsz"*ń+62n[$*)V->{L%$ЗZiu ]kȹ]3vU:ӅqƮJZ^uኢl7HlcnO~hy4q4:0Nm#9e/g_v~~3`. ;ggKbj, IW=3!)jd6`4{jꩮg#V2 SHAa!R :tFQЏK<:5Y0`F8Z>g6T^3A% L#GJfv(q#NZ9v( jŃ|6并ZQ҂3ΚJLIF^*$!%R1(b IhdGE3ǘ+QEQ١k&vX>ki Q"x8!\Q]p1pq,x8v싇r`<#@*+<|?6kNնGgfiMrIp%Ywd^K_cih2cG[/e95k͌}4Ƅr$=~I#Y?xjK*Z,Iy dP1xJ缨 2ǂZ蔒P1V,y! ˬ΀9?1emBwp`2QTfEy:鉶63p08;&yΜlK()s삥}k0G}<_,mNq4W>•xvu!7M>篸BD$RӊLF F1(z ")(uS e i'HKcD T%,ɻ`K` 1ʫd)I TA҇*&h13iPc3),CsT9* Y@h`l nLG<:O.HﰻW;nKWjriBhYiRTJk,^ '[}4sj7,Ŵ%˥]v1=?o:Kտ_7Klb]~z2(/ !!@#{{v:0!̲420!L.7 *cV2Ih{!(A45Wmv*~^ڳ ӻuR]`DC#u˵aB˧&{[翫_Ar=U(P8b52xez1vή{w݇ҫVML6.ߋ&- Z8[X]jQ6lt3{z`Tzސ5=6݅ʪ좲! 8+ҫE\WF[7%6Zn#z$xu0VSs{d:ᖡ<=WLW̸?*v:ZXj)Ї&;l^{p֎UƋLgMo>?X+@BJ_!M`%KExx1'+Q;;Nփ!uz|5mn8l{1ɡpsr ?}l=}$٧OPlxXIt'Sh16?ydkDZc|K<ޓ.|vznu6I,RwvwygV{9Ug&f]歚x=_6hݰ޺3lbݾv{7rK9Ucި<@Uxk,{ rUK)o-*1BI HI`%ayo/s\w u傌 Ó0IR`|P`bZpr&Y3 ^N8r) zY}z_7Rc)1n[:l~׹(zOTB ѾōUuQuKXЂVGəIkpEe;|w,AG^`f<X{_)笿^ +F<dLkd%H9f'Ʉ\]1RIolVC}$&z Rс]فby~ΛԦq&*xzk;YSO6~gG~yy{[E!)4)%BsGwl BKi%Цd Va:"Bc2FiYm\D"(*hP`H&`X R,eb[ҩ1Ry9I6RM1&IpImU?ěi(~P/!C3Ζ>5ګHRQ>Tޑ ?".V&bI3bKʽQI\,66"Cb!ȸgH6D-EjBBX֒ ,ԒXllD66>o)l_&x B{7 a3l>v?ZPBУwiƜہ1QNt5h.]#t>bq%pWsǯTD(;[QMh'(0൳0CErحP*FVGI.R3йzR /EJ 88-N?tH}Aڳ:JrwL%|SJ"ql2j{>;ֻ:~M^ "Qٗ )QޛcaGjcezDQ_}<:? ?zeNgMfK-YFGu*?)>K<y cZGowN^GcT5뛬F냇?ݒ t%5qVB)ECMݔT.t"t5~c*KaS[půKZvֆ26v23kMCz; BB|膜Re.]cY栌o=vy1? 
+GDmڮv~:ճUhEt4:ȥ1ʦ`J Jgq˭2\9V FdrRG 3{3a+S~h|g y+ BފiB{`׈W:"ZحɄzywGyQ6d%1.XbY& K sbY dΎ>1/$" K2T#@jY`H4,PKJq.%r˰4"GA9&a a:*e޳$4tJK"'l0qvƮ~ܠ\W2ߡke8IL2`/Q:$#EJ#$9(+L <(&cb=ce|)oGweEhd+;DFdBDdr*0 p2DI2@)8H[;d(`~]f,r_SoI296j*CQLVM82^6JE 8G;؀.t4Jn,ktĚd;d^[-nD[$ vi笶y:ֵl⟍PXRXAZFHf=j/x4'c>^de;_E N t% qdTZ#cH81K_nD9Ʈ" Ϡ2 "p>A 0jmf68n?88]=gjx:䥫E9\B#hFfĚd/eyEU'!ѿ>˟WW].3並+ I#O x?dLV׼=l TW};{ui/t'g{+=nunY%NT GJCB7!7Ӈ~?OOEioFo4y UFg+(ȾYz^^_?)΂@'jttM=aBڌ~?␧~ х>-/nFw<}*1Ni<-rl<EjO}Ua M߀Z2%0׷to6sYQA0=u`:O'g끞ns|4^9LumJ꺓ZWVMT2R6>qJG\)RR冪tC4+`k锛]-4 % 99Gfl) EF[4Ned $&q6`(=avp&icOJ" x:$fm郑J@LimzqTگ,l/s*f֭]۳Yo uCLOϽw |w|)̸asq*[ݥٽtwLPru)~BnR)[L)m4b%XPsD=x3xfg;ܷڑ쬏ȪnVqg\]u望WY{K_JЦZQ&bTr(㋴yVlX9@鑨bD4&$*1TVQ.Z*V=0!ʁ2Q ZZkfܮafgW6N]ՠ Un(}[Vd%d|{l_^Ѕd<_56:Xi؀PJ6Di/"cW4Exuj셬fתMm]-d|vm;OmI6Kȹ]ci>gb m{s)ы \I6@3X 9pF9d0A1a1nhh ))i2Q$R--2p[0r*& &ɅIxs>uwE#6jDhaЈF( R%'@ D))[Ƀlu Ni[c]q1Tz&ȒNĖ9abj?9kďį:^w+%_g3.^4o.6"zPM?QM4GV{%B%)9ŨNC/>lՇ>ӇXQmJ4?/訟#v_p2vCLя{zsuo9ިBD@RPt,@QJ9*D4˲ANh**#6 qŘψ(,!"5rNl퇋Q]h (5[l^,EDdM ֨&PV1nFWN'tyW`r"-0gܥcapL%!ِj$FLj*l]dJE1N6R֥Xsl(D_⢅XLaA6ƱJ,,ӇyHYBdzar(&K7 r>%] "JH餜Z}X-#[:XFe4، UoxS"k&Π\@gAm܁ ->1TS~V:\Q]%٠|j۫y>ߝ1gg"!<:?;stt:|oF3K:CDtN 2+4SV[EJ{9' l;Z.=tT۟~#U~d^F7t{#ROfkrWwWY[_8 鐇[v/ρ״~2S V/H-ƎuHwI;$qR v4u@<`0Y$M@B%x%B#Bwl,jUr5&֑` Pa3-Ry( IbfKV [7v6#F,fǟv~H064Of?VD\vnv𼣔p8\^:fn>}:zR@\BԖ'ʮ0.IQM!(P ZDd0h)[Ji2Y`% 2BUh9K8O dzEڜ{5d)zbݮa;٘tc_;̧ΘEi`Xj V)؞ ]` 9-`X>9^⣚* g'L2Y`+\4.j0g 9AE9g!%#BYͬ%cQ$ f“AR>[YD=PLPlY9c i`h=r VO|--f4ݺkgQ*縀ntL3GCGy<,JݧU !EtZr`܀TU.2_za^BGhC/E¨d2۹bh& d; gBZJ~|?dQ9lޅ KI2+; 1R!?M" C ~*~j~2_t"%2$CA^S6YCiKm/|U.k]r>3}.nΜy=+ ެ93KS3L%0y\O `Pxz:@kA h4luf`9oh Ir PdY]7WnM9>f8`XgHEj=ѣ5jzwu$,~.VE}0 9%TUjKl<#y7Z'jc]7NZhӑء"k Ǘ=CZWa:':]Y٬:9\ݼu{OwAϵ/o1o",=?"|&`ߝQ4M7pː6>!٣d,6?mn!qۼRWAyZr6{Ȼ,'?-s4ޯ`MDetjR<Lzʈ[ Mwe+|)LhBfŹtxiE]΄9 C00 zr Uh4-%5e '2]Tϗ=靰D&BJ,+`\YQZNT!LA - L2i>@ YQBpM3 VYi/a#0AE?1Rr]ҨV⒇X ']4٬Pos& A*wʠu`TaJJK.$&ס(MӁvj̩o#^Éh()++xe,e.h1V ޫnb ʿe٫]/DM/K"T ]=2 dIɌx΁UԿF?`UxTKub Ȏ)mX tc3cn{s1%J(!l241-/d傓[-EaNF0"^F^,aMM@g]'3ݑ.7,|$쑣SRYkxBe(5$Scse(m/C ()a],Cu\Tm/z'^ wd\Woi3距;8˜;p T;r; *AVvtǀx1'DWV0P꒡++8K?ˎNԐ 3]!\&R+DM Q·st%B$DWXd >]ZCZrFӡ+a)Nwp Mm+@iIGWHWJIR24B:D +4 6'CWT cKnN]!` RS+D+ZBvtutWTC&Ѱ`T_j64|Ouv Xxs%5"%5w&<H4μ?ӰUY/_ ~5= ކ<xBJRcɡ~>L`t1\B mKs΋\8rBcJDWτ#ϸc-H 2_36yZ='JîҼ|ƫIgF™|1\*nj>OÇ-CoyltܵZfUǏ+GvnW#jnBCY W(:WKO9 ̇ظpUӱ*Fbʙ˫0jiÜ}^k;A67YsL0!/\9xG $ǛٸTٝBu-w#>l*kZ̚VG932fz46BXmPfWTl3Dkhm3Diugmƌ緟N<W@ ]!c% %Y] ]qKL %!UפBWXCiGW'HWrAXBty: :Bt(5JZJ"BJBW֐+e||j,BBWV> Q~teIؤc "\̛A@+X+D;caMUɼDڴ1].=?'Dk}{[6oˏ\x;H遶CZfoAW]R$Mt$B~1?]!J::EVoxҼ]mued{/ЫleF3z9\B2^F|܌ 020:-2hUi;2NQT1]`d2tp9O0m+D)uGW'HWۂ(d d\Έ%J2|6$CWt%#edtJ1IJ<$CWV^ZN43D S1p]l;vDilGW'HWflBtmY׽ m]= ]Y쳾hߗ5:NWvˡ+ҋsw*W;3 X\ .wغ]?هU'nrj4zo^T7.|t/Gp>gi`) f`>1r^dڿ?ֿ?@AwŊm{ $hZ[gN0gyf?t < w˟ba4UΗݣzz -}sFc/.z:0 9ҞՂ_@jj %Sh6{_~߰e?>n`d_,FXJG}H߸g~wT *a֢\}(=ЯkRHY׬2#"S֑K-r79c #w/v1+5*\j&6A/w^?o'37V͵Nͨz+}q^ygSQW_ųe`mQ2ugO_kc=C݅%nZ',um>,۝^LS[|޽4CmlF0cF5(X0FS=bLBc~@>!=: HR4I>a 8c' .fBTؙu~wM]^|ƺod\ɯ#˻g0C{ۡK?bF6da4@1k]7I)krl(-t5uD\4)(KY +\u56pΜMce@2ᡕ™同*(@(XbX !S(fIl9%7S!|a>Ѭ+NX;>pugZ?;Wמ-fg{?]6WBLV ^[$S_joE3KD]k :clAB_q`G .>JPw%v܍7>HV8=7$[޿Ix7r=% i7Q[s -.+<3>S`΁_R&'ؘ+q~)MZ8ߤSчo2qy˷:Nn;Z08x*KM ԉjuZi5OBr#%U5TRH&F1 msmbږ<GXwӉC[{?dUWeXLȔf,r \&H ۤ %'k./жmɋ^NK'm`([IӉę3 OfХ ']덠QmPo<ؐutӐ}t-A/mbMa~g/$ )) |s#hr)73Ʊq6A3UkAA:PʉmPHʞ| (I=Cd@HCuR=g[\|&O{mUFi#!B^ly-kA]ʤq#3kY59r_CƦ@IiB %iUbR XK⃜tVgU?DԄ@k X٣I08cpk띱eYuJnc}PF+k  N1ȌRUյK >*sC)6T HlŊ;*QΜacX7p&CkH{-GMԆ0•RvL>oA鐪UTBCA,1?Q],^,~isJ>y^E+"/!rQI^~ĽԌayRZay0԰92\_W}mkgeľ%\$g68,4/Wec٩P4&ݩRE\-^c-Mb춡L&Ey=FN~-o$WOI7?]]oD;YhP1|yt fԱrov)s+BŋwTj_nG[xM~T]q<n!e;aNÛ٢g/_X٫~#*m$[GB{Hۆxu\ 1وQSl3bvƜV;nΡ:*ݣnnrۨ[U ٮ;P.[AƲ6Ej\ ~գ 
>l]6/*Nt-և/~xh_?~쿞x2Aa7"q#`~Çkho3a|Ю]_=떷Y[G2旣/|-En6G[ly7kW߫Ӗ?* -4[׋0[ӂAs6qѲ| TN%B(DoHXdUԒ+jW:a@Ə3s#? G'sHPzЇԈRTb[VlCC:ZԞ2qkBEC1Q]>4r.O !YOi12\h^Ysn\deV\lNbCX7:X'`}]5->o+`HL*g2%bQ8 [J1f㦾 PXqd䈺 1zX4Ta65 b{U:}H4__UGKن[S*>ԒbJV%#xkLcN\(;T{870X@F}KS(FB`anΆMԧyٚ BK?"=JM7=3tᴷϻX]=) {E'WcKuc`+5lQ/vY\9pT?p*J G,X_,0cE%bMTE!fǠ%Q|[1KTcT-B\8#P2׊JvڙE1ZH8#c? iƾXعb  %jlxKq `))MzKzԏ^wE v_S%[~IϠgvJBVN-/d,!–k<% -ܷC/ DӪNbmp. ]U- 6EsV.)*k&[y{:kkOd&}gzS8'#\D睇\-Ռ*T#0AhM\2qD|U!,jB'AWj< 3%\"G.(jkj9Xkm#Iݏ#Hv7%Er`uI`T )RҐ9g,4{zhʂ%($))0PKH9S JqT@p&ew*G9K]G!wFn`Giqç9K%ya"R]K.b{7)§e-|nsGStJ#I r 6dɥhIJK" y)ttFzFސGda'iFj;qJ,N^\v[ÝΨBy:A,[;9Fόz8ٓ'Q;"c:suo{,Z\XgobMѢg?0G E]kvn70#H^37w#eyOX&Xʹٟ@,6#epADweHJ&F<:m5M(ʛh:)j4(Vu~e%r^Bbg*^otsUuO_sUX*_cwtEtmJ"5do;hYN E]pVF`e: _N'~\<0M泥_l4l!fHAۥx[kf#I8k.YRo7~piOzEJ]%\ P.fW-n:ixgƥWȳEw{u8vkyRO,StR熯yѓ_\N|nA~BA);yu8 >K 1:¡|zQJX.b۹XF4`jcg5dvQ{%G?;"GC&Q.)䪌Uy ,]LgDW>=ߵ!qPs?tzoH#KWqўȶt% j1<7;U'ObQpbr],BQj;6wt#]nuHO7 OLR浌@h#a 1oc>6u\aV-/Z .w5).,ePL3s˓3]%l4^q=FUxk,{ rUK)o-*1BI HI`X7y<ѵXWxm$'a48O9L>$7 ˥A;Чe4w7=ɰۭ\{pHE)F߷'gHTog:fXB4\R$BrlVѧh}>hN|-'fyPJ!:) "8Qtb ǼM{69DqyKF mF<5 <%s(RtO\h&B&RHpՀ;m<8t\D"HnР Mu lr_9?bxlg[ҩ1Ry9I6RM1&IpI]YQėP`&.ՂH ٔꄜKUD4S+G'P OdX 6@`6F96#ǐX>EH62"0QK1ؐd;dK'8$<#EF́jl9p1h4hr6́ӼN{gEKI䜻%*ߩ<0s=dD>:nշyӦx,`\؅a"Rg-ך*6DB-ME 1Ax(jpTȬӮU{lV̈́\=羲]ܿ}7WxW_>Iv'BU6NrݻGkdeSi(mDDmcIByHͲ߫Y+nķzZĭJj?lGI<%BIq2Ik{y;l^ZZ W+(a`Ŵ(bW:XI$j`Fk-{C^tU#;*SH[HBNۮ#*!OmDϘ3JXpr-!M Hznl $ TR$2Av<#:j>10%xmBA!mx qf:vJ@gs&ч e725)Jk1xhD:U6F)C"&ruPHY"!(OSg F8KeCd1v=l~d[ܫRΗ֒dH_r)as9zzlR?8;!r+iuL]ym ;G=dOY^aY$ .دjAl+Nsn׻6zmEek-pv$yp?ꎎx׉Yo\t_<8AAbO/VCtsuJ\E%Bx[k;AJJ"M2aePғ'G%DJ:\I]ރfTJ:BIg#?\rTy/bK`) ^$S&pM=uce&PV; Tx㩎F&ꐶ` `A牖4$O+靑[IopO i>x6ۅՐuY-o+bJ(v!)>5B(I^h~mRstk%J<ZaTsO(ʁtͼJŝ]j>UO `o6vTaΥ0BonWYq^$z|o&UK'&-&K1ָ<ӫlp-uBR <SG {c6R.IZM)jy_a5hX˄͒5߿[귟|BZXZF OL3eb9+ !U^u5 @|a]+Vg׽fG Wq!?b6]MQj O()+,b$(# _n$?}JK!/ReESu @D颶 pD) ЧOC,G{B)TI՜S#aDg)nkÝ,Ng",zt[D4izssDn:w?A`O))g8*5gRj`T`-S`-kP !x^Zʱ3,ƤVF4U&w8pԂ4}+Rp53+ z+$8 2CL$}J33+М 5 3D263+Vd@p\}L \ej WRkdjGC\3 wTg2P6  :J6Ԫޛ3z]=G҆ +$I \i F*wB*)Wȧ\wWH0`LmL ~Gp%\zM9?;\=`PvdxœETg|o p%G:t)grN^,nKJP.fW1R>"G fz4'A#41ps*n%wL,8 QJ5"Vø,^iږ˹OsD<.IxvLje}y"VLpPIvؔ /zAqݞ`q3B͐``tL.C2e*ugq>BW`>SR&11Vޔ\&p| W@at0p5Z*u d@Gzp%(jHuH+`+VA Lp JRWH0خ2(jũ]⏇+RѯY(Mdj eO:RzojG*9Y՝n))#;u+ʻ}winӤ&Y|#F$'yyk[J[ui󏻚{m.rA" TiJ7^U9_Lg)F*Oݦtg@P!Mo-og{Ysfֺk)O`鶼n$/9VjvmCwMa?V\CxT=W! [*O}a?7aٻ:+WnFs0l6~CHM"J߷WűxG<_ݧT$'oS޳<p#o>gQfmHePt[|5)Pm/oy~scG}MO}yOy1nxrp#jPODhdPfÞ,3dKϿ\_S+Q\Ʌl4)b mޛ zc1\mfiDB_v6lcloO4|Q4+-FK$w0bzs&Lۋ$Y5sPYVɗVbuF8Q,\B,.EM^C y j_-\[q>sw@ HR IG?c3cs1 _yՠJ$M-G@-g: ٶR141}WmOΣ\Cu-e`0;s|R*c6FʈbWw&1#%c%J@# 34bi߇lRLZWcARP,)%sH=ɁObsA>7BsWU;3(k#2[h%5ۆ6K2#[2lMY&:+iM>by`v.֢vSN]攱~\;3dTXKvQxM& VCK vyЗ JS{hݱDBi"i, ֯'2-d+搜&t8yؤZcXb@^ZcC qɳ-,ba:YgrD,*MVMLAL&X@XTt(:ۡ-1}9|Ԁ Wq2HPi>*ʡ6,d0)/A-jq'YWe0Й- YZzeS`QTs 5є֭ـn('Sb5hWQ5ı t{{è|#ZHqMupwCZcΖ U #' ̆6'e v'&Xd*zߥA Avg=m ߕz- dܡaS`~ ch!$(  e>jL0h"T[SR\A=  n0LI!!.A D5v)*I0Θ3\R r6Й _ kf7M-UTLgac(@hq{ՠ":d"[Phl BR!]VI{GUcܫ. R^[3: :+=1F Krޢ5غW"2 ʻ5ftG DӷG6ĥ#z|p&{L!M/d`!Sb_ݠł4<1$hNo: $=:0B`OcP;v2񉥞ˇ1Δ!>oUUz~ԝ=#1YhDŽ 1*L$u&SɜPt56R ڦ:kx˓]@OrUBE{[ɐ4Ra2]=wt,EѺ<0{:K`s!Y'[_ģVH܁m cǧ>.d!:T?3&}Ǧml|Y"1J }6~q~zo"w }v9Rm V\Gе2"9qwP6" GȋU}L!U|+JI @F| $$L)Xsv.J ٘} @oBr@KS%U5#g4ƊKEk(5M68am@ f:x\Dc/XHK!Un$lƋw}>~O.Amfg1ĭ7 9l:>7:zyަ<9?뷜3͘ IiaSC^v sR9P.ϰ[T3LtnJt;cNvp l )aU) sA)a>z dP >84`uEڰ$\{a'F_7Ŋ*oXV1 /(sUx HOGM3"F T],kP~xoSژj#|ǀ61x_sJmXv=ִQkLzĤ j>ڗ ޢϯ{F.GyƍAMhǬP kk (Z3%a2ky/FWf72O 4%^t5`=}GCXPJ↕$uQ N~SjK2hw\dBC-Z*c14&ٕZMp$~k7L\Х5s.?twW<*Z P$pOl0H5o.B6?^[ϯ/]V!M, 9ch/9{s<&Z oBv,PD=ѥwJ_~ۋۼxϮ%vG絾9>HyEz^~_`9i5x|q|+OWWo/_.4c|? 
]}g|˻/~q2߽Ilvnnnn_3`]aK _Ť_m"U 5ÿn' &o& ~@!IgB;;iH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 &[J$&\L'yM=$I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$Ml) $"&m' ƘćMy$3LM7C4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@f@ebӆ@pIMm& 4I#|(%i9&"8kH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 |@I>l^j:G߽_㮽G@A\Np p#\K@)^K!KbpfK9K7CW{wk8]mrrtHҕDaDp2f3t5ᆴZCYdpɦ AW,V.CO2],**]}[ L]ufGw5|wỳu}޸q_Gg(T|w/>98/OQ>\ْJ,ˏu5_ ,GDv?]>yݜ~ћ Fhu/ faBR2|\=xD?nIU3}~ /QOwNw3u>f 7_?[VEq3X8;Z/̿w~ǝ!11zڊʘh)ʘ(YUsT.E>#&py3t5ц7EegHW>I[+kՄkVj}7Ў ]8 'u59O&JJWϐXkxjQWn&J9U7o&\BW-CΓI]%cI 3Մ6C23yR s!Ä+q+t<]M+]}3t>sKc{rz`> .?1]= m䧡ӱ*)]}[(ц y3t5>uvahD)JWϒswOͻS,_]˥eP^_\7+iI oSr}yy{Z.ޞGK|\xsW0?j08 zx &Ee-e>QLtȟC陽--m^dbws'ٗ6ɑdOfoudK$q#E?6_UZt\`A7W\i EWek\seKΠ@w\1Wkx"İ{sJf퐹BIvU= m7WRY\Y ]]r gw \ۙ7VֳR\}38IqvęqhՙbWǡ4-cWse{sus)U9]&Z-Lj/ P-쵴57G!Ak+,պ,4ge)UU|Θ+B 檠S͕m퐹`Θ+{ m7WRZݛwh@1]r 31Wl}ȹ\;g7W\)%3XCw;쪠5gW%boޡJ2W0v>UЙn*(+[h\`DgUE檠檠ЛwhP\7gsEp;oQK3H({4WVfXV;]Yš+BٺÅ{su착lfg;w$`<vޭǡEvÅDmgy\2;Z09:r̾qɸsA-<(;:xQ |-%vr*rc s;ZKZۙAM0wl;f ڦ93]2E2ߎ^Incټr9t?\=P)L6666 ȫݺYY_b3Gw (?! $SO*\v_+?=?o6Lɉ$jEY*L~uDZֲ?i~]~u膃/)jRwÇ;mF~"Uw}~Gu@OU7 rĩ7Z]#idd۞i?)L tΠMxECr%b2@%Ā*$#c$<Ꜣ\|$]uǝ\уN.ib.YES. &{$"灁hM;'(~ ڨF:Z9@ fmgSk#& ꫓<_Hsu-%,qmwz7ѲpaFV 9cruX.f7~ӫm V܊VƘ>>vLLn-x5MjDVeqݎnIgq]oBr!,g7*w;qj?}<_鿯rI?Yi,OeHwa4!ᜳ]ax}X2 x;ݕ}*"fD,g/WxiV˲IZvJ s?~mɇ9h{OAΦ/T^_!Y3%l?=4zEMźw\3K c͵y+VcGR3sό^ŒNRxOM F]|n.Fnշ~]7q6nyKXSjX` .T=M>Fnmk h̓`zb3p|*cm}/>ݤŌڕ)^^\l~rmx W{~>7;}44Z5&VXeF6>4~?9ҧu޸kl˖G3iTc84F$?)_ڼ=ö t֙F;9Q](hl{D}{@V~|p;+ rC2iRpnQQG|<#|G(*V.:V.,VN46Y/A\@$w(dwOAo>D%|BKE0T.HRdY)_bt6PA=aa2cy>V )V.OށWSt Oy߼q佁xS;f5qHU &2`+U ++YCI#Z=yDs~}ZU( N &&d}ĶQsaiS2mAJ(INuoBDA&:7MJ.%r9\#)E\$.2њeDNYB7<6%F` A#7\^Opt>VlL02L׋UǷOzj;ⳃA6Ӭn]!B.KAB.o7E_ZW9>~>OZVԲZH{Oz~h|8=/xOOwԣx \tLNjM @+F]Ds9e?f0{h/} Y-d/Зn5op;75a~xk/Dođw Puϭ .}ym@&U=7U6`#"bڊi+6WTYWrҡNl-rҡN0qor$\H|0E N@$?=J{cDL^7섍1ry>rl謏@EjN[%XZ憝anig]x?YWY.|i4{{&hOS7 <p|L|U %/ MiVo[bSj&ϯ 8t6^*Cʒe%OWX-P&Wefޜã>nCkۆBWfH+YDٟ1979UBk^Uety)06!-IŹ> !}waM>6r?]ծ29Xy&+i-VrP2*1n$6Dmwe^'Uy2OeԜdj{Z^TL{@ϑ{PBJd>}6{փ4ِ9:HB]J\yDG;tTI$5yܧ*"ϑ|2k1-o1D ZJaD$*zmDy9g1Ev_o{UA l᷽s.FAXLWaWƴ}AA嘄14XJ.W%\UR֔B Le\_66Vl\XEr9yRޢa,fָr xev* )RllEƛB;jkLR&Ae r}XNNɁh()8{ņ)Ie 4d(٪z(+tٺH"N1'<#R׫Rg4z^ϮXH!4GbEsĈPD%+"w,2m Q5ge' -2N8Bm LX&u"+S2O9+ֳg^}i_N'qką qA\HbNDH 2>Hʺd—C3UYVag&cOڕ5A^rT2vDtI2 =jjٰz?#{ڂ ,'.nht#HR2*r F'4SEUdy{шᘦm[k5&mۮ <>dg&h12&0+ȁF2Ýg"t'2\DMRUCh+ِ|a[>@=d@46CΜ4:K!(xJ‘gQcȠ w+2:5 јa1z7- kp݇Qn: oDI[&$qɍ#BNCw;()irφPȚAEVjw$'tf9}D,"%p̔Yg]9( $KpVܻ#+ltn}6Bǿ]혖-gw:,s,R1(i\ԶT'r]"@qƥYN Wm7n&t û)QGxE&n6?Ng>7}9GJUZMۋqm}M&mqGg^xg'|'^>LXT&2` ܙ!"ODOACV7 )/w}N?[yTϵv y[CI ͐{Q3z\xilgq8MJf\CB 8#[Jrv}}Uf݆?d  y?՛e 4!T1HxNjd!a$IC&Q u&GѾ0#g49yO=Cg7ՠ['[z1y'U㦈kqM.4qcp0f}KYΗK' 7Hɘ a"tc"ʺ 5#N(|.-.|V6 UN)C^{S2N AR-i/y*Ű Gl;EY2 6'ϯ~1~[x4_|刭 BRpP@0Fg1EdTsgRT(N><CVl`#saK%8 A1TBݎD02y t风RjSJ&rCČVZ)ON( P6C/xϵ@$) & &E@eXGhk1bl9sfS[K\zUzX,뭿}f#N<y3=~o/Ŏ`yBa! `j,Ľb6' A#v@Ia'r e洓L$k1D"*]%MYU$eU!@1GE gRX|rT@ dZ_r1r nH<,QR'o(͒Has=d2v`dnK뮃5/5y>aCL1*$*xv&؜'&)-*PGfP87 <,셏^#׆<"%;I+6RPP۱X)8yesM,;#8Q{)P_E KI:}scx/'N&zŚwc:XNռQֱd@M b6z{]]Z@+ưN،y0onfyltTջ&>z5P3%,{̶n;g]i<Uj㭱J暑ȽW)F,Vz4@ &% %i_GPcG ;.*H2 . 
2(woqh7k9}-u bD'u>mB/9lfm ELEJ*Ce,ؗU00JdqfIU,2A &%Sw*f:*UxTzwr;;_ =n~yWwW-?o.w>o+ʨ->j>u5/\5{ͦ/J9c$ˇ 4xV0L'KqN΃R;7턟.Sh1gÚ: %NNl$J$ %.٦%bٔd }}@>I@g%nD튮=2l_ |`; ; ir p ,*E\@5Bg6|27& Ic'j7A6 zVn^CLG{7w=Hfƙ ]`DQ%{x$y5ӥ8b(E0vBW>U3UyS1.ތT|Cp Z-;b<T*Unj3#CnJ9b (.{R Nc1nhdZ[o$# %ZW3إw3vξ35g/;Nu۵H`L~Xw!TDc'l;H%Q5 =hۍ8~Z Wh4&7_M_;FEo.i{]pӾ piR''7?r8J+\_pA_=,usݐצ=Zg~Zlw&z}8@>n(dɗnQf N~@&^FCG72 6~}u`i% cS04E#G9 2[.ix& scDd7AcURP9Q|X5/"CS[/hE Ny$7O8AZ/urg#Xɿc={ؽS9!6 $># /+dh(3Ӗkv~-gq#”*w g.0#)j^%`Q< mrN t ; A2$AK`|_~v 8P@.E޸F60d.sMJ_$$h!rW;ZiY {i31Fz}Vmyȥ[Pn~Vb߶薦JN.Z^p`X39PQ8spNAJ){:jeY8%պ&MexBUOٕ+ ?q}|83Iw3i.S&3LӺ74JCD4KQ3%7m-Bu߄ %,FOU=7q58R(Ύ#G8SBˌit.s>fd^<_N֙cBյUkO-&̍SYMwXU5΄2(Xu 7/`QB8bT͓Fs^>UXQȪ QY'W {5MA׽G)E_2$e{>2jk a;pt9dCgLyA0ρFNೝ'-(y4Alg]fv<Hm'CyTmon3y;mo+c%O$ _τmhe`icJequsDt.RXh|Hiשּ(5b^蜒\Vx`%0yPvhgct:蹯\ιGA=gh\}!Q/W29~'@ېM]*״Ibv69p?l>F"!1`䬴c\"tmX<#TSJ1g+r+6GLƀ=dB4VL%KRrEIXP"VngFNwVߒZ-EyS+di[ .)\8Q+ho@t8Ǐºи?D.8e+9ֶZF^^`#c9 ;IGL#jp8OEdy4 e#>DՎ*KT|y>{ `o4u3G`+g1I|Jάy+1`W@ΌLXh#ZkF3_XޛOԸg _{7?ǣ00xbjcΗ;*z7վ]?kvƇyor[I soÓEM(/CQ6>gatț":N1drpxϙAG,]f$P-eZbtƸkq&9 5 ֫2[?ƨ݉\:w'/,YdgeTTED>TArW(ܾ8G텹@>,.J;4{@JZds%N w9i;/QelVb#y[J؏COc@ANB A @R?cā,2(5j8T:wHEBGjA2jqlH'lZ.Lv2,͘!$S,XNhkjaԋ P.(HwsN0&z&$%| Vu fGFcq&b,pd2qFm #Cpa*`FDŽ`W:uweg)C>KYʐjNy%IkeV: F@/2d Q̇^ǵ kI6U`J\ =P s 6"0$2J襒:л\˕ 1Xш1C&2Hdr Z%[d%.t*eko3%%`UAR5ovHOYI!ؘ,%pHkش3ҀF墼bQ#hgv3T"p:L `4G"0:YV)fhTWTֲ 6fn[YH5SZχ3D#jY1w]*YvNZ(ԡ,X+)|ԝa!0䲲LK|ӊ00@j%pwWkwpR$s'_>VfkָU9j}? [4g 4v "|>$_1,_l6Et`,ѣ xAV!OE+񔠏HCBUW u:'w\ŽfXZ=A: Ti*6uY\J-3.TpކmOkᄯ{S~MeT=^޹g#ӜEbgxgyBY!KT XղH=i3]dgꆽ xu @*$z٣ 7;ᶝNmnv^hdSGc0R[.R reǑFXD>Q "[ă!ʔ)9opU;j󹦊z|jx\sqL)q&5#㒛ڵSF x\2s >![qXF8mg?iow[ŶE/?8G[Eyͻ;w{~{w/va_h:p{AGo^_0akm/|ͻⷷptǭ3~o7?ߋ]}I8@7[z2 ;—uCb~`ͫɻ7f7OFH.=~q5`^[k4ͯ LOcW#z4i9,0Ŧm~{-vX~*c*_ l/èKz>1տ jF!߷~G}P^/'L^\x>xwWQtC|Oc?lau?ٚI5T88pxx&8AERDW`Vf&CHxWbZv/*6}sP\ ?'#uiiwz0$AHܝS)ʍWO嫳O[ =?~O0A~aYyƧoqko{9$]O>tghi ̃*}w3t3(ޢ׃x^\}uX cx.S̾<>~9]W}k,;=7~١Mc`5!Fp'{/,unD\&Z(MgŤޜ>W26 "Ln3'$KNH,9!Y g*dq oIYfB>_\b;ly _PX}0D&3[W?6 #clNׅ rҏk~lޱ@TIB9czPf;NeAAepǒhI+H`CI/)JCЊ(XV[ 'h)#|օ}V"3׋l#bkӾ2@m+ 13CX{A!ڸ^ON7ڸ^kzZx=FK "ݒ,b$"Sa.)ϝy(QɀgquifJ+Ĵg_D GdHki_Gb/+Qp$ 7IQsf}a}!z}#"{;ss`LmvHy)4hcm+Wr_+\1^ҕ+]ҕ+]ҋs93ӛ%Ga/9 {QK{kT vśȡÌo1(zx2> pW1{aL% Eٷ:6ZʘY+pB*EH #f\pW C;b[ϯpM.I ,NsEfK, Tya$׋\ݵ+U pU\|w1GL5{Fe޵ .<#ȕ'L#bx4KgQ8y&Es(T }nɿ9W(>g B#ϖXt4JFE$@\jhAblR'tvB0LJ㱤i(g+cqn%+mgIy3EbiQJiHƱ.47&Ӹ<EfhV^hrr.V V+5,+)rDP葤FTfGoNq2.#(ΎWQ*RkW( (*Smlvj3E ]JQԹ*LTQ,{"+W8G2#k|,sF1Qb3 gq@rU0l,D AZ8r ,(E_!X}Ly6!LDQDLDQDe3p=(Dڂ(<}K`v"T?yđBZ 1[ZQC5vС<ʜ(w)%mNfJA = V4]ARP SRKݰr9ʥX"r &E`$!rXdWa+R*"0>@9]b#}i .^(ÊaVd! 9}FFE gi!INYUD@TNCbNtE}ގ4 n[N*0q+D6ԵMjo;Ml׃_\b}O.:.`9KCƢ RwIC(UT(iI`ʄRm6{[w F+cIi`У펿lt>p__i㽸ש̡{wrvp+v7i皦0Y?: ShlB4[0Öao`ɔEL}~:,W(g3tc$ftA;ꚛض>̪[jp|eD?֍I9(SP|9xLg;$Mw{omLS'N/Rau1ydc&+0."zʣdlgrdRD]e#2\,ʴ}E#Vǖ3EbΌLϟE!./+TeJF%2a9yk*{5?&Oi6]*>B2YÇ&=8S~1R O1S1\1g \{BSi6R3I@ ZȧBOS* 8S>Lgu6HNE5jr={9E٤ z.{E楲y^%F5Ӊ#E P3".rr!H&iK- 0q=R3gERh8 e&XP-A+/q2U ī] *@ }9۔EFflS9}۔:P RX4_R]S#/jr$Ⱦ(6*I>dKN2BT !cq+鴮C $]BEڽs|`OV 8i\ TmߛNytPKA ;)k!0\vkDQӛd z[Q7{WQӲ>82q^M!ᎡEǝ[O Nm*Uם\࿔DGᯧ?şk? 
rZi4wFϜ?8Pi~:~'=ήW@C䪑Z?h*DkXC,ԃYNW>-dt OK'2x#NfLP^H\/f";e+#"ƆJ"5?Ԃ?5"OqރwJ8BuT'͟ pο]0$hN]5̈́D?DKq̒|gXl B}^Ep{F$V0 =  wX 3)M[#>ajbY)1!WWWI V%_"zk^+Zš׬s2R u5:=r˂&( #EJ12+Q"kEF"8kQgr`$YN]&AeO"MeTgro쿯 j ?0~nf|ګc'R ,]pla ]Hvٽ^m3rdXRxoڿ/kg)0\:h+>B'13ސ oz.H=7-fp)^Ӽ_Q}]죙w**t l* Dgka^Jb !ZqxOcp IH.{ * =T1qeِ|ްCo;ܮG-C(Z?KS>QXP$`"',pq]&BrYWĚ[R2NX봾J \XaѥK} 6$4,ZQ"-$2a*}x a»XcPT0 JyO +E_ͤ/gdlww^4ːwmWyPt^##ɫ ,v /zYaJR"93wl#zXVK% "&*"OFȏ.}^G4rHi,=ԴR(& յ85JCT}ќ) YӔ*\k)v9e<;OԔޣIk KUi5+5*ok]pq&~xZPT b,K k`V'CF I:@ TmZ*h2M.tG"o/&ǐN-iw>dJԕbd~8?R2н|3f dL&Eh{U>rjl:#lweZEY)dgY"~{wrDmro#kp+o>f3,d6k3A]+ Bj f"Qn6'F9OGA@RNBXCE 3!N%^lil7}3 no6㟡5g640щ%f&2)9 Z{U Ml,:"* WMΐlE%Í]pki=j8{Q6 [`ۋ7WpsDnЉml48d@Q BSP#&D_I,[Z\C)@IVUgSkPgȧe^ne&3,JW,{ .!\hAa[i+O!OB;0iDʀ&rRr;A"( xDm߷7 o)o>e$&RØ>:rSN"PM_**)UwkK:b[_l?&y{U?~xWnT󗴻6O>=v/X_o1;>|wujowm;}_٭}JO{u.nn ;Ɲ [}/o3Y(Wg7ܐW{[%^]?BSݬ oy}nq/HvkGJ;F/$uOEjxy2Ed71wrHE$&#O61@D8ɀ=ѹڑ6 gi`xJ\[#x ;dN ]_x5S:ᙀ"0H"2v>ZJ 'JK,(V0 m i1m19ICtlNC%!2ЏLh8 ڸـ|pXy4Dr{ي"=:>-CCڝ`N2;KP]BLO܌č6L#F4Y2 Ks(:~@)0I~XNxE : &d(:e eneӶQ>yK6j_~$8ᒡvCX%?vJz ` OFC9-4cT hF pd@$5SnF,˝"!lH>v;Dj/|(hGxd? n_'8V8)2,~$XR3iTH+1"oxy6+%u(d(&RU"񌢕hL|.(h9%E}B4EIf1^D22 +G;#<7PC{JG1 ަ]h]`ml}?@]v5\V@SMź[FVKs;F9.$*>L~|=݌0鱖fi?g/!_HBpĦw~{yCtY~0g=@fR@npR-+N8 *1( gU 5|`}wO^i_~)G?gKSP'*0?kέOTfiA?d"HcbKNA{HNuiq/P,If6 };&ʥ}@D@h﯀˻?.8DtG =2_6 Om$~j_b ĚZ2V-}_Yxݯ/7 ëdc>ۘTUb])n&ƒʺBeUҒu 4l*uNJ&j VmTڊ3A嚕UsPM%ZVO9 f(u9W '5ivy>~TDCN<8Z&?+G+\3\~Mg|P \ٚt؝tTV֞H|g0,Q#DwI_aGߏaK% leJg2߯-ȦȐ&c{fU0qfa  ^L0P+H#+"\ O N4/@( "4B'$I(e]&MEd$|D؀Z,N0(g)1>lc*k7%YuѲ)sBpu-Q 4)Eͮe_n/bV뺶 ڲ5 U1T㝧fWw@+0ZIN=d?iXI Dɲ( H%4k[&V&HDaKVc7w`B[&EHLng}py\7]r*i]"_G%);(}8EiyECJ' Euje|lMEǕxjFZ\uj*΃hJMآ8/ZhHgMpVM܊&nlJU!Jf+NIe|lMl7cRM~MayjrT*dSgAYLO ' 3M OWtH)Uzm UYMMk!7c3%L(f3MbSaFry2 ͖v)g??m~E_]~;?MjL)f8n |A<y`~Z-K gtK= L$\}X~^4$ߞ7je-!xRmqu&/g}7a8]l~bk]ῧ>Y[`r=q ?ɹOr2q F9j䪾Z4[ nxSc,qf z/]N\־(E pn~rݝ8sg<Á,'֜5 F3"Ws" }Xt$(1ӛQDƎ7E Jp0s'#78SLf8d$\$эRΐKL!s3'yzhkPn+_6=dbu8߭gS}Dz רǃz<بǃz<j XSjIBE3iʰ\&hc 4L$aiNbc{lAϣIU cncR3k?z1H*Н[mU#gpLp z9G"~3+ϯɌ$'YYI-J^)4|mQsJ\EO(`-ZrD}`jg{bgT"R X/x>eӰֵSo}Ӻ5ۃf^OWz "w21ŠȬu>RM.;dXִ5}ް҄cBᇄ%2ĩ-, aU.K?Gaz璞_*)x<7C\e]Ыk@pLr:' +MLE< t Q\8TipkH.8t~U{N<8^O`=F̖&ku'b+KG`ShoX.#0"|{:7 kS‰#'#pzsꑸa^k XYç=-#6sUC\CziQEĆxy9\u4]V T}u@m3PKiAZ?ƾ_5ȣyf{+&TF{4"s@b<zkCbɞ¿'a%n2y 7w}5a ̞\" cx&4Ɣ6,оyC!n.Ҿ ;#R4:d/E%&p֌ }|)kP-_lQH%,_nN-*Zmk Wَ(4Ry͈DToCȢ޵fJۆdX|y )Ú{xn6j0L 8`i(p"i]E9<伅ԑϤ?Nq^A1N]9EYOI !#~/A+[D s3zʞ|īsޖSaǞ0D7xpd~ !3G?@1CӍjZM_?r9žN8VY~'5CDȿ I)ct W#gn\U=E^NU $amͿ#xX`V  ե &dXUP(  XRM9h?JZccB}TgeۭJj 09](]Ec u! yՅ́7.UD1t'&\bm 9\OMJ"(]@O2)\:؜ň֥8x'17-T;A)M /~*x g Q%URU(c*ע% X꫈rORJ ]RL[X6f]΃&r zgZr(֠!qsՒͪYΣ"hYr=RcN[,ڮ\ {2 4eibb %Lxe҃IlYZ wa6.+,ڬ)ɎQ('./^pv/xs{/ʟ/(y³n! '!0dy'EQe/ś:̺Cx^A<ɚ$yN>JS*$8>m?3v:$DQs 1two)[SoG9 1S*=ZgPEŭdCl4GRԹMgSI5D5G+ gʚytO-)%T? 8F΁M2 %o"[?uLH0Hh-OufvBRhƅsY'oϼZM..B-TD$*k M5[;G<蓣I&(*nu#ks1w1I4,YaHH-U,vkJi An ftO/bpc\"%=a 9\N0bnk+evL,Ίu-f`^0`ԹTAv+xPe`ʻ3a׼Ts YKy&XNx5չl!i[> ;4^K!0&H0$b]J]!)aO)c u%vh1:*dSpGGzMudnZvHU'TK#%pO}2ټ 1-qOqE`_){I_)K_mEb)y}6D ppq$'&\2:D`iWKk#:@'D s\WW-w8_|$ͦaS:&3u@&߸0nfp$cm+Fp]D h>p-Jbm۵tӠ %QhHrD(ZE>C΃rvvZU#3a.QZ\+ԭy 6͇'ؚQePj.YsVL%# Mn\w5[7Oݿq9uvpxmb /[\ͱh ب?M'9,ێZQ~X|F&'*L ·x95A#7rks/k4D=QIoН]W[ n\W7޶D +u0 T`E =&XDa. 
b.D3IlV5;]ǦԌ>qX+(Ł m<:j ]"WQx8jǖP0/\ZT`W>蒊;)D.NZ{ZdcPt~L܊޷/ MvvVȦMkhr)%Ʒ>/;Zo'_VgL4ݥ1h1fu/EG 1R4@7f&nk<ܱ֙v1ޗm ݱ$ מJoG]՞h"='l̰5 Tjzq$H % nxQ$ &oM7v$y:M}ҁ3hܛx8^_3ާ#ܗ%$X*9?y\@LNFe2MObcN'{ɨ{|8ȅŎkx1]y|.G`@߃{arrbLa cEt4JЩ \sד5ψ}i81.X RHhE-^zgbnCcQ|g%F&; ܊XisR7xoEnL Wqϸ D)PuI&9 0c?9u҅sT$A"i*c^F(̣2n0jL I@lK)&|ߕ} |0x̀k~xLjLb¢Eo5(ŀ53|yxsIRƃcFd7vʿc6xb Ë4'j?W19Ed.5ָpRTK*y  )`1Zp`I { @0T\fO jK`@>(IY8k젞#YBEwgFбócycC`Y۩%.,KeF͆6|eK6#hհDևܾv޵<#0fuݺfn-"¢[K&DPza{FcEj* D&ΕmF_)/[ F@qcr'hWab%Ap0M!Num@\}Yqz L8gZ D61{ZH nw?apEGTU{sKBTUUٴ}"aK ?a0Ov㓃(?|ʷhh] "<۱.:.}hMܿϔA^穹&qg`$`2LD0px8rtx eU@f1ܝЙ~#Lz\y!A u ?Afvk1ݿ 8Mxl|v & ,!`lo W'~~MY# OtI[#矜Ae 1w6 nS g׭WI:qX;~_?{h#SNg+I?\pc0 9 Xd0{u 4 FbV Hq otɸ{h>.xo\};|9f|5/}?uAxh{1Xqޓ'Þ_RٞPp?Ype^ *{*jeOOSs/0$0GsF\yD%q}fG^={7NA3u/a읙y#l·.O<5q{y lƷ;}&cw]:z'go\}{^g7|;/P*bA;Oa^/^`~~^0e(cPv'ᗴH|4#(C+>W^/\loyi:)|8yi~=əLl_3Ims^z\f=>_Boػ뼆.Dfe2r 2p6?,8 FׯӅ1t!BƁ6JL[s*%#TS0O]۩(1ɵ  P 3"E(k5:K|I?%\͌`>}hwfŗd[5Qcu2?ूkx"_zOq D'lN&3 :[\!oғOUFG* O#S195秎cg4pVC4N4(%[bft1vG*Pȥ`ZD("G:4?cusؚsfWmY#? n jwdmv&=Wh4댉݇2(j[-B^WޏQ GJp x~ e/RU}X\p海= U˸k7zB7ҋ,A09T)n썿A ` Je6]b8`qh F.wCa0A&X.q,-}$]$vf"A-ep5YngZbWm>b_d':ٝ$Zqq7V8-HWбsGu|qE*/zruA` x?4(^DAE'GefbIU۝+[0L&gvxKIB8󘇅tfnl*4>|K6JH.mv6>%!6S(>ea)QS9uq&C ')6|jFT$DD$7Ԇ"Dғ6U4iS_+aLac0.k 6;RsɚXw)XEpy DQBNLmJNJCP:-Ae3kYg%Y9)#jb,4!Cfml5CjC`ޅ5Q|,#4%ݒs6LɁwe`CP2uSrn mYxZ^TW@͛GT]Ժ}UN2-܇>HCL4vHZG5ȥ5*Q 5Rǿ9T)]:OqȆE,OwlHnUJ!`*Y68ƀ4 LP(8""dUKY8BR8#|T\7ݠcÈ cbdD)O9D3G Km/zO)n=A{Um޹ <&0 |LP )#TBNPCn5`s 2 !RnS\NW46fsLZ-v4 C3^EYڊ[@'t^TLJJl7$)4r`0}` #"bˠCcP]id2Zbo}8|, C8XJR턑ejJiҘa;y7Q%|%瀕IdE<4T8+OmB6`)bb (%؅1cfZQ3ƾjZ1\ <6𱵁$G_wz $nf1 : )7~,I[RVwR, &MH>,YڢZ[(XڅqeE\)$&bgQh1#H('TςbIZ2 O OeR0)"q"y^)oó @6*\]1@2^x%Kܗy?I R7h$+М(b7ΡdV܆~Bls꥗&ǡD2AaUiרl ` +=36SLf)p"N(]m peI?TPmI>ǿ̶@WnoTJmQWfG%Р&~-n N~Ļ0*jҕwʛȯ+J~M䄈(#ht3gO9!zK ! ŕ#8A]1GNYZqq{Xe&>F󯳃X~p'-Ǔ6G^.Fǵg_txy j^z&+tyRP0Z//// SV#(5%˟#ΘatZ%QkJe n")iTBMTtκJ:)֍Y3 XaC/&F.F9/^d$ QTt 97if%B _xXjˉ |>>~B9{q,۟osǛɇK.)\N,΢ Gۣ#ߦƊC>x1qmq̸!? 
>8Fedȉ3K/V^+VLoV[X/޺E h &;hjSeRv| Zja9h,j۫cNCZbЂфiݢM 5J Z+jt(Fo*3&hKP[{dnJPdDJ6['h3ia٢t 1P:1f4$MCR:AVQ:bNPJ:CX)h7uܭ[TT|]tC9¢d|rD$CCky^d]L7*(S줃Qke32DbYvi|Bxy1M9s%PKKW;L>!RʋP,J5h: ƋriR1p "x!8oL'W\*ɓ@cD"=JG/$0Y@lԐ4p.;qO(X F5DZT$5Jv~-5g(Qzݸf, G9j(ATQp-d%'8폨tG+9ɇBN&=XnhՒS9sNi%r#8{]e$i*_Hd$E")PC]RrFo, ˴`o#DBӦ7,5Ԝ皺%-V2̥esK$9Nےk-63r_Opg-Jh@(DB$*n eeTf&)[{)Pd iυ#y mjmM,(Fȉ1M0F1( jֶ%w$ZW6PXj -|햶,JcFjT ]PNYr7Qi,m>dh -: y.x N"6Qm8;4ʓMYcZ@8EbRQ@-dq6<] UQ}8''˹|ˌK)bEG3B/$2)˜!3$K#9>yc'zIK;춤YA,^uB@V^XMRB#Umc"R@I#֮NGB31#QF|PԊ#T2)1Wz:sLI>1^Sќc?D{X 옢q C-HTi֊H"yIҐ5Ugh>,PD-<16#U 6x n 3{k5hkj4Y@()ؕ-PKpm8wf%5͐B͜5,yy"1s2W5푽KXMDJӦ[5B [lʴ,ma`/EU](b+EMA%LdE3Zss|F'ƀmhK֮)\W~`Q巻̾[պ+pc&}:j c2b*h_Rڂpd %Mר[_E}]^ڱ8]֕Bn2^si9 d5cAy~wyxrΎ&2t(^7"sS)-g+:=,"{g/./V2DͶ gYaJKѶ,kOT ؞PZL~񡋳%8f{0fxY?7}F);!0f"<{҅4M]p$.FS.X0Y$`X%Wa6XZI;b'ݨC@,+<9[rz܄_oZ`&RG[7?6dE%$Mhy$]1!p=%Yu7$ՅO~h}ؼP"~?"1 d|޲P $7ly*W ǟ[gߢg~5EkiM?G.y{JhdJB3Eg2?Hv_|E,sӇؙ+z>jh,MWFo)RTxw~LwTf.e_ή9,[D>+<<6Qs*\~Qmsҁpcd5l M(5Ov U3&yI4LfNtz6dR1ژsw찊gB;HCBx[Ka/k xۻ /Y5`A.Nhi@EH aa$ZUb3>sd>sGcJ `3onr%%mBco2x|1./w\FoSpgZ \ihj^#2 J$ |bahi6hߛܑLɅv/=jF-.R0l2+&!x" pi e1V^NtV[8; #U =Ju,ms*wѦ \GF)cbFY7 <}$@o(93L%zVcRa;w.y!T_9b[;5zp{ͤ`'qK{k0'YmZiv5ךQK__h.l+E7&CAlHsFn̰Z|6?${Oοڼ(^r M.oїY.p|vWV~E++39Ϗ~x)OJ=nүn6קWi~pLxzsӷOY*0ͶKA?nr*nr]&ED;Q,8ƘM(WXs &e^GTT<;Fpڣz7hUukT.р M0!18" #fDN☠Gq@uEKcB`?ui*)nfm!MU*ңګj#TAfԬ(6CGTsI,hvD 2b( J- J$5Dk( "K)*Y'H8!,B#Fb-#*6NU+잒F"Nz BdxJ(8\ Yt$ڋwI<?YpGRgZdy_<+<7KGz=5~,9VQվCHK&X!s<̑R[x/ 9 ŌَsAq|xi(i5茁r|A#Pv " vBָ0!m={@Bwo`,4bRٔr³qǠBVT׷ʐ)>5yNE&x0`dUZ|2OQJض#V5_(r te}'??=W r%aQ:Lt$Wa34P{)A&՚FQM#oU R+h$8c۾%0}"Qpaaڿ|dMY `{WY1S=l'+xPހ'4 >Y<1ݒ"Ogz)bhoKE(i_A%Z"M܏7JA8A>+iX{ޱ 7P0$mėҜ#~'9#(_f4cfp~ONyٹ3,cs]\P\uEI2W֙'\w 88Hy DߦVt|@ vŗd0ىK_*quk3s/4)S@|de"k جxAd p[`'@0RI9֕TJYO_WrzjǢ*֕4ְt"JRŁ5% G E\!iùdU+YK1T"nEלҼ35jg=PZw q]Y+e%'hEE9b+UJA^+xU%չ*Lt^ɋ8=Z�%8uH+X%Æ3"Oȼf'f4cwm*j.SIBQ1&,.UOaS_)Y^ĥUϢQp:Y#}F8-C? 56}Nے]ٗA2g/͎dUJ,yQX.Vj\Uto $N~ էAMST UӤcMTZ@+$Cq2{ 3xVv&%7e2fFQm(Og%r9&ǐ:&)Y~lUch)M?3ʊ%\h}vMʴfWU,ۿ9Us+:cjo^Unb9^jI1 ൤;, IJbm@pLdBpaP0V\PH@*H& '\ Z@@0kfnrY'] 9ڟU3G4.4)*hy?jF;\z aA {{Eh<8r?TCv~ɏ$#(c3K?W|6LdT}_R[o ~jv9*Jic&=FG3/2+rZ\/S!:?lQI'[U \D;hӅaBִ[剺j:$Ex㦪]R\ͭ_rHKN4H.3mQբ!TƜ7U҂ӛnجl *f2[)# tMRVg2)NN[YIUHƅl5fD&S3 =n/]Y {!neZzGBK ؒ(KNQfQJzlF֣,p._K/O]Ӿ̧hOE޺CoQn}E0MH(mZ.g)%i(үj"0#u_Ъyyif"C&DHvRegEZ| (bt4'5ҩZA3e KeAz7|lՎ]6;3_VEf9 `/2N e3dmtBp'L!R͉㏯RTIm紸dO$سo)`A͡]B[\2w'8XATѸ{NfDņ}kDkfs扒-&oy8cf rXs8t^^ZEM̘AursfD4P7#$GX4ua)PXڋ5K1q<iBA>6c懡~ȕ& Q'&) <](AT75'~mj]gtH_ݷ'yY3IKXbqu5]zfk21r,lNz*麾"79aRSJ (J .r/,~V u!Sviǥ`Bw:車\wkh9}EƸfįZ9HR_w}h+ y[}=fp^oc\͚pcuW]bGD}t=av6% fZHڕx¥]R]nQXv$s9XjQ;Խ#;ܲw)NiwN/' kOO Bu'|j_R(r{@Y݉E5 жbk`Db穫( \aٵo"RڱS@JP{DM%Ooܑ( ?{!^~<]~ma0? fܿ3Ρz<]qI%e\iNp̣Oo\]cP!8L י RIy8zkVԯ[wݲS'TBX5p:tS-wZ.ªl wj0k ӥvp[]ĕ #cL_F5m^ě繙f|?1Q 8G^\8mvq0>3+/A[0|'+qKj7ĭvĽZuN-^sOwO[^m?:I8 (چ%6yqM([!mz5 {5PQƍF8(HBM+w_btUabBħmc/v<:v2 t& å{ٷXxlÄ5o|Od%*rT#TeCcjs84 Õ9EQL8F4+Annj!.7pNaiR`d&x:l5ngcJ˶`d}F9"%Lh$`^gw9 M{*|2M +zLI2[˟ t[ %&ϧ%؏AklLٗ0~Xڭ,S$Ʌ ә%Uqi$KH_1.)̔%L٣4i\_ X.WI\MM"b0IEHId4&8(N"?bOfi9WNYp &==E2D**Jt뻇V'ؑH|M|Li Fs3UwyO$yـ!k=n*滟Ff 2hlj7ۿ7gaYU<5y>y~MC|>D*' >nNǢoK?/qqqqZ@{F1 { r)* MX̜$H'nkuE狼PR*)IB'd!3bWZsr|qvr[ĨVQi [mFۇmGâF%1gj9b&a2.1ʂ%^! Fe;,F+k/?8#iݼz&D3ٗlBE>zt8݁Y$#.$VṞ "9rkL$jd {$!"v1}Dz;" 9t֎HSTHjgGM(P3.5RK i薴4'yvNtl&ME_K"9a:uRZDEm/-8[x+4‚w.{څL~F3uT@YWRP$(qV^&[aN6ME)+l*`٣ϔI'a#evՑƴNG络콕i:"xHIGu!~Șe,ߣ04ݩwʰ߃  wζH{щIkcހ,hC\Ѱv):}c `(%oqu>dh1"Z|x> ]+}_߽|0vׯr*U 82|\𝸳k!A?ife?܄S"QϞɴo *pW$bi2|J3dM *! 
F;cT<9Pޢ7lRXuUZ Xą:h=;fcyHFTA`'Zbq2ñ Dy= d= Br #LI$C]a9y gy>ER Yr]0Da EC)B`Ɲ56YKxg6JERRݷ:-x.}oֈkb'+\B7ZGw"sL}h;FZ3:'xr3:U#$"O'h|C޺Z;P߆EM D# 4;Nf}ar3JAZb8GsI!Gn+2K1 0.Sp< 'vG&)*i.z+~RҔP@m{ʺҗZR,%5,ڑ6L#%L?/ʥ$58K҅ kB$\Npţ=NGc1]2?51:Xdky%@:92M]9tF;twT-_LشcT2>Чzw@{uaD͖Jin yrƸ!8M,fA1,c2PL.L39aɶ~AL# Xoؼ9r/'آ?~;#G9@hI^ZAY#> -^ "Gx'_-["EC4IQhn@KcpuKvD[^*R7ľz]ZݪAD&|-@ReG:nyb"6v”#6xCEc!,i,itpP%8lsa+#%Ra(-2X BAA6U2;T\|V 4;kGii.Lzv%mY*뉷5ڋ Hi4N3mDPD.&2Q94Y @@JIYL,3 nXww=2|-JJpK.)ѝLsO HVs a\4V$`Xm@@<0nCG]l(e;%`ǒ^smƙHNʅ5ɑH)5 ɨҤD8$Ӝ@E#:NrR=UAJ.Aʦg rT:3YԨU3gGZۑ8,$B vc 1\q$Xɨ%Ib;J1S\v$/\փUI"֮ M,\63p$F9-ju9b6q(L:`Q+Q$Hj %,yp0P7Mbќnl f&&K&:[% QkRO L3=M$QIVRis݈@>Dgq,%p,e4K:bBntc@G LrHSehZ!mKDkd59VZV7hڝ㬧t`iel#LPA.;^ʑyH:~Yϼɛ;t"O T0LgDZy!!1!\iF5d.ϞN AOsꭧ%$P X\}%< - )Tk)HiSF8ArƗ}Q- $OѰD;߼ۊF(OE"?ud{Yꘪ˗>&}7|}Ӌ4(Oz8s.hF.>8ז wuӾ(EOٴ\Z{&):׻nyk4&@ƵO5b_rK8)#q02~$?DlLb#bG^>ɣ'n)<ߕw˖?HtpL(o4z/3_xy|W/ǣ8õ,c?? Z*NYڍ*29v.=zcѼ# E4Kj5}cp v˥AFkL6vF4Uu!!߹ɔN)}JclOxgGU؟loGP AR ɣҡH]]l)=RV48o`zڠsճHI M[޴^ ӛM/e\0w2?SzH$${gk$b؎ֺ/rã}+-5{/(ld` _6ѽ#dΟ<pTI5GO%sty`KgdΟ<|!88ЎH` Qމ}9BASΉ3h2WPtʹwڱW0ܹ Z+qxWG8:t>d~ k@@?ҡ CoE7 CA!rרbofCoVZDG^l62aSA)È0/- j '4(| 25%+(q,K֤_?% fV.ͪXfG5RI]pȃTp@@NyW:}Pm,%;jٞSX홍]r8OlGEI{lr}Mi& 1)uqγG|TDқ@m}Dk]ܵuk߾8{=(-~[U%58X;y yp;%ABl]Utʂ0@1F> MD1 lJG^N1#2O5B)^æI-Qeo?N̵y3b0zh>\7+s:KAfuz*B[ϯ=hu&+(]h9.W€x3c %l9*[XTkAtX^[T8dA.J~]2w_-RKvޣ ߏQBeE uӪO`8w/-,"Ksݷc4w/-b}W89-įwAJ&\B?i*7w_u V`D1yBuȂ&:ɑ[:ASC]cZ%PF.I!V#ɂqĉ8ʠh~u\-92U^.]'Q~`gKli_k亟)c}kٳaKUM^8J%Qo{gÛ]xS^g,U DՏ~gG;;0/"|w7uOu> ZYŰg 9GlJ!Xy6R՚jn]}zMqmzz;X_\saEQpKN_bUd̛ PO  h%ݞ{zgrpQB 4nnI@sFp]!O?8~hǂTG`l<^ˑ:ZhiWB}.B{ |՗9* [ɲcn__{-g~_W`vQkr4fj/a $ h|Wy<)xA68~(oYpF٩g;}+Q)es&z<4&to ~!iY.]dIJ v5$ydDK ǝ  IM4z,Sl>w.cE}hk>(DyOe\}Z|ghE(PߗI/ut͚f6sK[̖^8+N9kQ^9dr2 ? >f.CANv&Tt-7+_T:s%jrg.sO\3S6K,m Zq`ʥJ%*Y.O*SR:0kd! HaJɑ']oWiZ<(3y勒nIk-5B.j'D)qgk9J#_ӥ+[\: ga+9^]x}Mŵr9N9%ٝӣ0¤ׯ_~i`kgS<Ia jO :qWlx߆fB׭[0%jQ68l6q"$Qsp!{qɕȋxZ!Wь~u!($ YhaUwpB3E !$~Ef'G^=b[bP')Z#rQA^"r/tEY_dQգx<)dq H74qc@62NH=>oVGm_$PSa@R5LŸW0w&%eq6nbں+*.͠Z{ Ρ}-bc׶kaƒKh+ . Ks(n&{-okw}8^un}Z^f[CKEO4b$7T"_]䋺|QwQ{ǠmO{CHF;2J3zc{g@K^j1Z1{Q|7r!?Px9-)jaf_O޳^ ;yIWHINT,J~w'z?LsE5{~EgDYk;e]3ws>>L: R#[?=9/ǨIjȧ]9*tYdM9H3WJw_ko}["A`>4.ѹEk§sSEU^vxxlc~#y<֔{Oŧy랫'ͳB( '< 8REJ񗏜1ND$qz8=X,|O6.FWwy&2|~}Y(9{ O>O_5W͖܌ֲR+梀V.eB$'G ĉ̒7CY6qoch=Qِm<$Hq١/R7kFN5ujzFBC=ڦZNK'#j^Xe!NA\DTVguZc) |@1g׎hA3ρI,z !(u BiAdjMdZ#$2f(pqa1*)ڝ Cak@4*tTvh-j>LLr~PYd[[%nm{Q<ÔV'1wiA?ӚR]+>VȎ_)bJ ]nhVaQLlmWZwiqo`4BqWP8lAWhDcxz"eb+d|:oX;ߣa+<>r! ˴|%j=b3{lW!yuZ\ZoYFǬ~8-8V]}I==[a$\꾪*Y; =q樼@ɋYTOHk<全7py@{qnΠupܯbD^]+H*nhyaϘG1_{r\3Y@X-gCZfc"ôNk7k2Oٵ .@1||^l4r!fhhԀUvocZC2浆j Tkh |5bb+ggz2:؀a+ükV$ yAQD12T֫VjI>o_#m^d"ٯismB;:r++]h! -RIE*~qP4ԮEj}|p7I[3:F'%\:F5B2(>zb5ZIm7ϣ c r a<:Ł0tndIp.#;-,)1i8l@9oe΂ꎷsTYm`^KAO&:~—MAFԸ  Z!jc|a4d5bǴDhA' MH 5`JB'!RQ\B C+F`Q; 9pkiA)[fUԌ ˔"QlrT`)Eq{!18.%iI#1պTZWb3(`YWX*Rms#K)V!g5ocBrMƸ#HZ ~Vj3Z -VjPkJCiB@3 Ib82Q!HԹp)E{E Uքh er.kEnweQg@ب2+*aSL1 qOb\f0i*lO{(Z"k&=zCdC2"zɒ^6UX3 wJtPH`cIQj3uM1M)F0(QDڐY'-]P ,K[FBFHuRɿT0l`Vu`NҏVFKjЌh2 4<8Wif[$op̀5!'OB`ѥX(цM`ST) _瑉v :FTpQzj 5*8U@HFCO[mX.Đif5YCJq>T첆0Ε m:P@h7;F'g$B3JN\rdh0=ǒ՟e!M׋2l֭%Klc[ªIm v!?I~[P\/cWxLGx}&;)llŞZ#Pde+>eƫr[|LSGQߠmӓDܾh3pȘܒ/̻,n_Ƞtֳk39C^5B.#L13--QkS4QSGER>Ҩqmd@ɡ?  
Q9 0=8K^EDtnv < Z}"c؀jzIp$0+i@7VVyCeZ%/i}ћ>Қq9:⹋WN^E9NUH :( h\?mh-<{i9,K#YgPsjI"gޯkѮqtzvzJiee•M刳E+j'9܃T|walF 5^cs(ZB%(Q׌3JOIOִ)K`,v Sʑ/nMhh8Q<[wi㺯 zgܤBG"w>Y^M跫lfm8.5ZܙM*ٗQ_ZsRJ1%Y G/XJ̏kGfu9~tpY;/*]ʝ_}MZ'O^Rpp\$%PF$>5>)1cŕ$ivp*EOmBqj9ur%K<☊&-3'a2k RB1ʼn[AIh[#92bDRBLup1 (&c$L;Z$ %$n6iL ulSS3&dw\Br9m{ r 0Jzj ;Kn9!FuD.aKJ ǘ!2K\d (n0nDe͂J߂oz/=f*ShJPs|$+xhР朤 Up[~P޵d"e;KuN]" ;nLw^v`5Ėv{1}%٢dQ*.iFΥx-SH2)CQl\@˃!X儉DuAkZݗuA5@.6QjAKƨmT&zi"w3pq&_6xǼr8 KpɄ4@WUV `]39n;"yMIAmSuy ))._cN%4r(:/K:Vf |*ϳScM@ 7:# sxݔܼ:x j,*[Ϸomξנ 7n^ݦU??Tw%[ؿ^u+7i"ퟻ n{6 0ViCSƫxA[R ͏?#;M>LoPm;a-+֎ٖSJ/*MPۻ_\o=U#NHNBtڸ ]GRcL0Je@tf_QA [&+_}uI9\RDw2tb!9b[mPN3\BpZh5^Iex]XU{d{k~6RAϧ^<ȸ}V7ssy󋧆m5>~ !-vls'iiĮǏРCEM5c\6Q چփVke'v ##j=2pg{Viz yR7ݬ vy ^i*!/ژ {=?>YR@6$Z3zYb:%`ƌ#^s=0geux , ]mq!M훒`AxC7Bk҄_rWj3GW kutn̄umNd6hX0K@<>gY!jX*!xbfu' C`%M#Aܜ=4::f].q홀s@x\*I\deh)j'R*%eWPwW6 d֌Vj{lITl"jRWg$d--_#B^Ͽٚu39 ".yMѱfO.qrSN (!adNMVucӺKliA2w]72ݶm:8F^!DLZuMݞΥe21Z?QqƿL3r{~~Ch|?!Tԇ -(.xUʁsg/Y+d}?}@#&-ϧ6/"c&4R^0NK)cd+^i;flFLK)"NO%'WKH6u q˧/ɴK,s,AZ<9> 4Ju6J ChpE"8еçW5 c+q88)r:w }舕i!0%8\)BY_X )p˴|U*V=gV{? vuoWӻ Dw06G|"LH>aONuBMB@ω*gFWң/cgaE xg|풃Se_H+ S(uЂ2 'BEi1^oXθo7)Ǭ-qb/ce}CsN\r*)Ro s("Ѵsݻ 莩J$*iF^Z~rE~ZeP(C!b9If'rr5Iaտ]xY$xE?67w1QrRg'BO5>毪,fjƸTz?Rfqv +^߈JyC Y{Ue3^Z%`v1P- uoڵΰS;C]~(T*P:=EΞ GkOTvimXݬ m=;8=r4\$"(p ϸB{5F(4Ld>%@H`?Ooz{{ӡcd *'r<67EM,Yi~N5c3䊚I/B&5(?E{tET~xw~%DLk?Fy^?mrжTdnTnbˌ.Y ΉŗQ-[.=:[d86@Ex҅w e{4QI/blktwB\ߜ߹a>]Ym|?6D! {[PpWL44^(~4$ rVt8b}Sb07vj?,S~'E;eK{W~+ۮ&9gߕ1]#-\DŽs7nUYબ[pU--XMn 䝳JD΂Uu2,"pJsA$a@[R~yixMzvޭ7L*{z~u$\8(GC2 Ih=3Z2)9k0dԤb:We1UYLZL: UF8o< D(鉣K.e0SbIg [S {N՜n;sS鮁u^jSciKWe Uc@hlJ\^/p<'Qj,ZCrGLtZ9񩸳$)F%-!3l^L6|zXWϋ*?$ϸu{4l fI*A'lF9 s N wE=C@3(#{|D ttBr3-N-DCt\ur KwDnN7ƒRISk$$YoE ζ T)˘0&%~"8$Ę`\Lj.ڔ oM1pn)]זn{cu sʫֱo %5}>7(25S-k]g",=)QxXfOIl\J,ޱwo.{|o3 3^B,-VF$gOo^Kz7_5hy~`5"k;d:}=Lx})6pFJO>=(16P¸fA7}Bi= (KSG// 1r49E| Z]dJqnFm7?>bx8M>Low9'@gHZ儃Ɋ3hw Ý+pљQˊF2ńqe4L,pfcԼB!@ 1IbxD1v!YIdjk}Ԉ(fCJg_'@GAOeѴu'IG1]/ q̖똓͛_>5{[tDͬOjf5JvLM/ J|Ln)UNbx0< FaA%u wN xV.qwSF@tްr` &$WEܐy>jH#o@^>69pwdIސ6]Ž3007&Mk#9rNj3\y&;J'%9uBH%9Ń Өi ޘenŸ?xFm;Is섬 4P*%Uذs#`^uWph ΑwzCJsAKE@_rL%$MgTI!17-p ,ϠNlPg'1`(7gF=jOr$%Jڧk46JHFS.fTwe=IegfUG`=cwvz0 ]%MJ*%RI]|M_DFDFAERYUTbm8BT{E rA(7|;rChk oq .7 J(9܇7y9Jvqp(zH I %BH| ܆LVm,Q97D)dM0rD.օrSQ\+8n*;uWP@cTCX}zxJkG%Fb?T+n\R娣'yT}Y<¸w]OB)!$3^o]ӻY]A7^f)_nϳr0黎iV&/SjvW_Ϸۗ߭/R3Sl +k#!\DdC&*gnN7h%O1M/WŐW.esř:3O2tǧJRU)<]<ΟnW]&6^ټh'wHz{yQͯfzr^ͷ/;/"qV^鈻.8e$ys/zkʗD] N#89jcMܕ虫m`(``W{ đU83 R -P:A6D|\ hܱTPAܶqpqzpg18U jc D8Y=˵+=^ bn@z+`~7$!Zʘ4H.Pskk@nI(s AJ}2"dy6sB{NTfG #( N ^HolxG<$_VvJWpN5f>{Z.#}e.y`Â6@cyo+}I^h7|cȐC.z,rrY7[=Q@QDsf4$~4$s)Ԏ"xmf0Ľ"N{ĶLi ADzN"Z}5]ݨ>BfԻzt̬'#V!ujzsttֽ9 E/<̑9p$ ttvAB҂pKIe"Ƙb,&@ a7;6@x /x5 wk~ߛ}߭bĜN5 R?7,zx,XBeZw3}~wf֓2GI69G{WOa蟣 ]#.+.!.!%} ~K>d[sX7|cȲAGIf>|e{ x7,8%:nUovK2%~v&q1sH$)$jB9o^)Hn":9M_/zx&#gywW?yt̬' mꏣ]?ݻWOFHh;GJY!H z>aF8A:p(K!/S~17wY:AQn8A!ZLJB(Gr,1e-X-JVrlsS0"7>h81S! Y+D1& H*AF&H"0iw/Pi*3<:s& s3@CdvJ1"DRp(( c% vqՎ^f[ آ 2$aj{vǝTݾJ3=^SvWk} s,})dĺlQ,sa5qqD\R ^9^>3 7J-2`=;y^=d3,#YBz CLCy; ;XQ,җ(ڊbJ\}fvo>TJS3*{L8˩9J淗7WV7\_3a4+L 2G BqyU^j¡͹0P`ri'WKytzV1Ԣ%\8P4FSM f*H;?B&]r9b s!B-@2i>xuACtq$a) ޵Rl}oo}2a<۽zP7kuF Λe,y^4_= u3P/$z^XZ~}%·Ê?Kb<;pW-D`Q8z]#7j7TJh"0]ah Yt.'17#QIƜɯajqZNgqal9RjXĢxS+rUa:j%:^.Q۔~yr"Y9 D`QiwYT. 
.f0 tc-ݎ*__tx JAbXW2[հs omò=VXܱ'7װo\ʠA|u|:NdX;ppׂD5_ku>v]2ڿHY`aDF(3Ȑp<v\wd|b`s1Ș5ԑұ3BQh"sb֑B hiB' $E 0(dot{>/eS؊󘍇faf#2栙UAOO7ΦM"ɳ[-vЩ~l%  D}թMv8Z1~xaU)7Ks(IYi3)42qTM`#>_H|Ji$xqdl C/+,Uttw!Z̎NQ>$S3-rpGWr% [!cpyd1Byjo:,^_Sc;'a<]Z?Y9}{0K$%N\8 dljefg'5HF}==t&Hb.ݛЇf pL&@i>)- 0IRDv$SV_H .isD"%RLKhEOOwO|28 ٖIFQQ)T鯘L 2Z@tNp΋O}Y>8B#fC0^^7*&^'{lŗc:G {e{9_2;_b9GЌ3qa# q֯|fCisprjC-#n* ._Qc@)'X)ѓ2tþa"1Rc!}&ۋ7#_4u0wy ΧP7(.{i:ij:=.f1_y$23H0Wu,~=系{<!w-\BIuǶF{q~}ĕ8aLѠ#`Ux› H!ŁN b湲_R4jw9&7֝`u>g%Ym@L??o.nZ*gap݊Ο޽yD!|YT&m2Ls*x%OD`v%E\q#L ̹ʋXM89 lU.{]yoA}-Uz!輻gnouӻ7~}w_rȽ9Կ|Wj7;9IFV"¥#918S` ,4*@.q#xr8J ,-Ǐ؛&jY:NKZ3*r x %VP(.uQ0 ,++ D'"$C'EÀϯ~ƒ6 D! HjMA,yAFz@.g?1d) J>Le\a nN r^Xc"A7WwE2)B$H:ĤɄV1(S tռ|[WJVz( %6 CEaa]_>TO4v,Zآ{xC<$.yک$)PSe{3N=oQ^ࣖ1x%y'ʒW1" S} _(f-A@T+v03][oǒ+_v~1' q./čDꐔP>Ù!"#tWUuQݦ~+,/U aѩ&$bT(ETJԽ\t<Š 3 N; 7W?3z7:Xr5އ'ų?=n}ό3U`Md\u>?/c֠=?*BOKvŠZ̎^<\ a,x zӖD K5kF%"jq@ yN짉<\~?z1A;c knG14W:7'Et|; }g0WK\|rûLU(/&>xg~?jY_!RtO30a0&>okpcKD$M~⢾Gw.ՋB!5pGv}׼V멝.# g jYkbN'z,Aag2iS-@POKs׹._v-$9hNp-"0gP9\X1pc.ę{?j ) R(rd m gˋhqxuc/ZES\9n ,+8jE3r%ݙZJ; `ݜ{B^efyBr\?w1D؍@Lq,P>MkiIޖ5uc8\p?z,ǡ$ʣ!.)u΋9RIDȑ0-?=rLZ$NRQd ֎XaB]Wm @e`!&D[. eשqJ5/*ק!eʛ)2(yNwY}k=pa{8 f3X" wZ=8}V)TSR)^;r4ðCD-\9⺱ |vՊx2DI+BȺ7bJbFMnދk?Bx0S'c;.ދB2M*J!ނж[SqRS聄稞|~1 H\z1JRId1m8?A4ys!84 sB$YDbՆBͬneVX`5utk&Fik&FiQ>8fI+Q=K hDIGF,K46JF{I\ Sdp*Z)ъ#&<3;HHSE*zV@.DHʗCV㨶p ~ek4UH"Te8j}}j5cgpU(0qTVk#t=̔0'97IaN7E΢wU"Pnim7P04-%X![o\0:Za,Jx! i\&(a F͜`(xHjSz Q2:弒kN7R`R\±`Sx~ sB5!tFjC>$1$&I 4I&EyЌ p>J ^~>tc ,hq`ribpQ(0)X|-,<¤'6p4u\{ec Ka2bM{B4a1`@VLRj0Fx9H21DZh' or}z$"6EV-V$t5Z]_1)'VW9W!H"D?Dy=x@%mJ]}W)=qTn:Lsvq@8~z[r/)Z?Q 8'8B!IP x. ]`PK2Ŵ9iĖ9hP @QQ<*+ E Η%6/WqP؇x,L>x),@##3P^&u?mJQUYS( A[#NZqHrz4[ iQ#!a#Wq}jNDKw˔`#B4E(Ӭ|D.= &K$ 7hӟU)٦s ;{4΋AeJyw?]ZnOo+ٻ} EUηۤZXVAY8XڪMPv(p%inJQ CX3OZ G$c6`~W,1%Ǫ3k _}|aΔ W":gF{I%{DK}@YǭOk^vr^G =jCd4SZVx^p\PA%eʧR])hܖ@ t,s0@`uhnN2v_eDt-R]}gZ Z] pqZvvZ0v؉+$3vnR3\AQ>>^O}LETlJ}R+TElgU -ڈ4!N1]bl5+I.9 y>n5,J6b ]t, >lci#r/ѳtbtBVȨ_ PwDCcβ牷1){Ѻ90jrF.S-0R[̨ks s+ٖ}+CϿUB*b\Qz1sL~hn)$Eri9R˽BҁM#aAD 2qxaƳ( 5bQRS6RQq5FLfL|o!uߏZ.9PL$j6Jv+/ߔ`m8u C q A^ =zSThn,W!.D K 3ٻ޶%W?<$nQ6riQ4FTQrg%Zxu<$gٝR>(,ȧe q~#,yѓbu`#$8M` |q 'l707+^ Rwe:Gn=q+LDKIĜ&~!N $*dn'$^x}ZzA.$iMo2GF|'n7=cl{r=)X=;\Ƶ8Ġ[:)jEp!H~t|2D'eRf`onq06b79cӌ}b}ٌ40#)qG Y~fl$+bZBNZqcmqBn _˘}Z% ^nnɯ Ї) kŌXӃ¼əQt+rJL=,N` \a݅#̅ ֔peTȶ+  pXǿs~[~)pMMo+G-87kUj{&l;Y3:modNHv30'oGj"Y GfQ~\HmU q]YRJ+Dž3nN{h)dKTb?v6;yS8yB̫0'N 1t2…u03t"yst% %3Ҏ@>g&7t9=`eN 5؃\p; N>ti,F}31>1dX#N/bMڹ;.8kեYaݢ<@K Ư-h{3ǎM]Y>Q=c 5_IBUeZ ne>lۭ6McpMj@%m 6+6dk^PI)g<Tj湭)ۻA;,-ZˬڵYr:6XCٗ^ކ8IVZroixmH͛jnL@LhzlIjṲ1j VZ ZJvFh5S ,0jA 19h,DמAvWAn EYY2V.^)}3'峇6) 47d5q6PU㼁u/cTq2b͞ȞCEJl G>JjCҸ9VA8R%m4?7n#XoJ[7dTTG&EmXVe" Sl9:7 RXNTL[7dA*&s8yE5 }M}:{OGVXZAj EZ1rZtӹÃ`Ķni5di\BYEhТ:(󸙝::rD)²nFl CUVuC*_uV"T]L0Xuekjs)UN :yI*::%Nj/We"y6v~%I 9Ƣd&_e8̌+#+QRV-fE&nhUfŒPe"JާpbLv𤂮(^gRԓH䩦t^΋PL)\4}1%X)sh9{ k^#Џ4 zR +^-QcVfJ J(mlͩ.5$JڜJĚcRł#>vF0ŝ.tYSDf$y&5'5Ǔ,ַeV yoVȪbAVyIe ;ne[M52B NbJքUbP)+I[x,{IMx-K8?r`.qmLo56Guq9RR0Ip leƎlNU. z^iגGe0qjBd؞+Tm6+IdR=L#I0u:asҺ؟/.`_8+>URԴ 3 Z]3,F;N]ءܘ: ,e}V.%b[2auNʀcE(ۺi%}$ҵԏ.9yɁϬ/u8y; E~h3.F8g\W Z.!3Gaq RpO\m%\Ce .K݁|kO­<:`2„sAsXf=7b|wtִ#LGc7ߟ>荻CPo.|zz9oOҭ^|yq󟫫7߼>Mv߼z{~FPw&Xկ?ӍOn~~u/x򌯧4UG}*{&uve7YM0^L'1A73fuZx\ x&M%y81pfn= N_Op&<#`ի@ܻoJ!ZnULcOMQxwJO9+sW˫u_Nܻs3,>_A2wTӗpq^\5MzO/Eoo]oh2%>'ߌ F*_=sH tP՛$wK^:'%8*nlps&!Kںm"P:(yg"ݻZ'lW8 *kG/._^nQ;z;Z+r|Rv/WUK$Sa]!&fn?tGFo5qfXhܳ$Rg66wWyzHrD#)ʃyrǰy'Tyxŝ){pC2S #D"=⹛;o'+X'D^Xڨ72]0јa rOQKD0DDU*_.0/hs(O`@Pr? 
xEXQ>`gB+sN'bQ}K+;fg>t?),<»}8zg#Nm6)#FtE(=IBNb pď %5ńf=S6A!KksuA3wxtI~8ݛdzʑw9m:z /N@szL:"g,ix:|fM,'<w6 GS°3i}G8RI1S|Kdʌ{7&=}9+=mɆ''Nv9qb^]D zzo%ϒdX$yE9ܟ1D֑ VÈJDRnc$ETQ3Eŧ6zd3>^n3!4Ql"HSnQ.c2"!Q*=F%حJ&_)$Elw9(s.D5I *: 4{e2S/HkFJPMʤb(D5^;B\PNu5þ h.S؏p 9tF5R#{! Q"$'=PKTH}?XN5p8׿LT}v7D;fjǺ;CXw`7yǠ-ѠZQʥe}N^<о|Kz;?v_.ZN`&*,8ur"!9)r&C? ԗ\ ; L} f+'SE,iau@;Y:ICyi/_r3d^)`ϞͦP#WD<nE_U ?=yŬڿ~'JZx"|Y%z%i?ϟ~Eeq} կXC\ן &`X0f~KMqbV_,.fUMSBgǗ[2pmK4|?s )V7}rM6J[}|ӑRC4νZ$ـ~,VPg%r9EtL)䓳M%cFpE$魄mڝE͍jWH)ڶ5nшow4^)F9;h˫C f[6#hoۜ~f]},N\LQVFhQ8gLrk fvX]t2/eŹVfUۡ)HNfa(z˞Liѳ9jжexP9Qڀ|A18ҹ\ԇ_%!r).JeEA]*PW%J4*î2j GMۚYl&T̗ kƣS],^J͖$%F$isФOoNf%fyryma䁮M4S2Xݶ:xUڨo~;>løY}FvrͿ#rZz]5_XUs8^X/)m~Z,CvÇLJEl/Ԑ)ث\wOg뺘l1zV“ԞRX ):h]b:IH";g1Sm۝Pboe2Fp#/<] WwN@lI͝a[F$,1r^<zxH=3x}߹Rq ?>xu'Zᯘ1cG" Oϭ_(~q#nqmexlz}@  @Vr1GeaL#F-gڱlPCksI0ӗv8;og@gs w<炯ly?5~`?ڲoh^E#8=ϧXGճ,Og/e6:lI71gi}Y֣QLQ^{[ jɢ$\LYitZ.l )KKzde6`TbNt龑FdvBosU57]UsU57]禺q }00`!Yr!j Ч`'u[u}𣛱54Q\J+kwnzqr( {o%D4ʁW4 UpGϢ>t&5FClK 7:JZ/Z^*CI=AYB AQ`GpuN!g´-|y|hwJ]- w9p'P/csTڄ>&%$JRԜ=rkzue^7,̝AtX!PE -s3Ny/x{S 06:n)ZγyUlB Z/bTlµ7.&K LZuwT[#ΣU #m;JƎ&/#;2ghx e=jyo \R G Tw^rҘZXw4GN[q(y7oGOiɆzxpX!-' bp%#0kTZI?*oI?*&S)F_hT+z( Nd f$זQ3[ߥ矏NjkÐFa&P/Z{6^ A ŧEڑ= i<d&[I->_7^+L4Θ KuaePpx2Ƞo/'I̡+CAqU?e ,!䐚H% 5gˠ,8#QV[` yAƴ 2/KL Z`B B- Djif[fM*!L+^(}QrJ .WbZOyj}%`.}x;i,FĞ 4cg;^,ih_# Ϙ/AFO>7-%qbAN]h:âO3aXm(cpph+!aLwBCe.֦UԭHPRLGws;൘lv`[6Rz7ZMJz>o+%7k-H@R-̻$Vjyz%7s&SIzc#yUCLTrzgV\r`Dr T$%%6dm.A/@Bs/~[f)o},BL嗛E{NǺ.C-osMwۍ@l[۟ZFJV4彵B9Z ٮ}: fh辺de4$F*C@ʢ Yɹ\i< pz. L={ug=5g=mVKӓ)̃y2V&$ c0P;,\vOSN!8SfU3WW?6[Ҡ 6q[ڴ[ʠ[=h 0Z-(D^?xs8ANKv(غmÑ!Y60*hps:녃brecVEABͣ @蛋))/ԔE\>͗Z,yku1ݧ^vcmL vІK9 @xZzZ zX&A-9Vw~N/WL޸ܧj7wi|(ئT7J‚Y TH%p%cJD WN"[J | u8!00%Ҏ5!4ηT;c1efrjzV=h/#:2]xr~h,7V@IdY%JKP,7H1P#hc7Q꩐zJK#w⷏~a@䵀E Uپ HqG)[,} }tt[zLlE5H=g3ئ+emkngk1SЪRmng'J5뿼ct6`+4L6 K>Kȁ hXQޖ`9و\-.;h0aTX9*YK!3F!~:3f;G5lfQ;\̑ #OW_{"sǜ؏s2Mf j6,Z3RPxjVn- N o询ap4F\W#lv x4#}?Gl9|=^;9<;L(Tj=_JIuȌ+ULiT}ka!{RpE>f{d\+V84/sSKԜ(#'˔h腂qwemH02;;S4n$ ѱ=aGÁ!-Q17A(E!ѡ>,/OL@@seT"X!TKmin K3("5#VDB< EǙeiK\Hyx;d_eo@Edu'k/ޗΜVYЊ#( -_)equIF q(m%isȕ5I.}d#RxA=e(RJ"&lp-3`QYN!rQ51%U,UH5IXb` ́uSܻ}]cJg2t͖w4L`b:OnJiL~['ԥ~|v ?_y}2tv͆Wgg!/?FSw.j<$g`|Qh* Fpr(E.{[k&E] 0Ơ):p EK kf_VǚII]x͌O_gJm 6>]i$i>hyK#GkĈ7oz*RD@ kVdRB\g(wE98;r;8EQ Y44` A5:qQBFlqee n2*.^x!+KAci(G(Ġ(=1Hqep0.BmF`4FGKG[AwbGKV Ņab+%T`/L3pe˱ŚgOQ٪҈/I1-7a8C9yX/O M{9)YVǎ\s@7 }@ۘqb\7Tru ͤ j:&H!bueuvdtmG)՞T=q9_Ϻ[fnyɁ)g0ֲqi90O)ͨXY-&H*i ̖>Pw^wKQTV b\ /i;tJo= 9 |7>x|` q81 ';''&5-s{s)CN- }F ~w5P:Ib":!i,%AQ<*gpnSpϸmU [jF8%HT2ʕ NhI.9JE:Jlk>QX4ҷu_ߕS6 U>ZԪ;- ,|r#ћBDPԺX S]2J_ZJ˵*Wh&RǭLUޝ ᮍ4q7{t7I7.F2&ZYv!,$h ج9 d [6@U *+#w4F@: ^V1t٪xǺtIU+U%tU|# y9S1U8Lʙԏ͉ kKG94)*"b8D%DR:pMFj}v>(ELJKs5'\1)eSQWC|˹RxM0ϝUU9CmIJ!44P4btXkMFTWnѭm!uIw}O&F~oݑbHz_bȔw&z11wňnmLJl%C-8u~JQ3^0*tSݽnR&+ ӹ^> 6urE\~?4ͮON^|g_$=Myq嗗//2n䳖Ä֫Y @I?c*44y4—:3{>~|;DUvukeNazu6{u(jvy#{6%CF q2m^\ Mp  1Ш}FM:FrBYh)G,3A%.oj]#[?g5ˊVeЮ~6= lvbbs^$t}587tAFW8*ꛣ-5b}GحS.*^̪[a fajc [4Gg7WT yh{oh!ML{m3" o4@8n)YkȀ q;I%`@u͸Q)[YK&΅(-5`1V[CJf\K[e7aGڃճl2ӗ2ӡ$߮JLuȚ"vol:(%6Yl4%#Z(%8irnoIF΍s;IA=K[ܫvc 0w).|<w{o?P6^C@ɘNGTpn'䯲BNYxa.JъBK~lj"#Y;F؅b[Wt*\>C҂ o&Pmp=Л>xmS*%R,Vp?̗b88\i_Um(JyWt {NE/RES-jב׋IzQwEy:o+t2Ͷq- WٸQVR(]QϞBe5CIDq㠄Ba  -W@ic;JUUa2gUFiR}nbj!n. 
mhTT>HIׄTy]5wf mHFd >SQL@jpI&Uv=Ζn+3VRq>-Wv}RVR9 2snݤюl!cF£'-6TJcq!kCXurq_lqbžpW:b!#\fa枖^ WMWVO_YӯdžYFq3)m>i% ]YXab bkt7z3SoOBot_zs Z@ߪ2qmq۸wq[xV;DjR[(ǓΌ}3O)SU1΂3-wgj?=kP>EXS[rp?;eOT19t,?rR58p}aꩮ)+1f^֗Z;:Gr"s⸷ߖ)o)٘)cNY$Oof0ԝ<-INܽx՟7d38%ֱ((Z_''hNYϼ:M4 eDRBK:#R(9R6$q*TK| ڵ Mcj7^Fj9GN85;TTH` ä-<-t`XPgUxîs%xw[ :l4J^2YYKWF,G< T(4Y[)ZmAS{6}TEnf[0J>?4'^ΝKZ&7AWFrN 4U"zaMj@CeO!;wľ{{ud/.=:]g08#ox܀/ޗgl;NYj &g茥DJ龴|jF:pV X:~ht!rxybӾH|sEjZ04veKY"\DSx!X) CtYHPXyƍYʙbujlOdt9f,ocLTK7Vyxo{O~{pTKas>ZM~ΊK; 2X:n/-Rj"ܪu.],ֲ~֙YOCE[j.*L>ٔՈg ݺаe"9p&NͿD/7ÇŽݯ:.ƃ?} _F.@-1Rk3&;}Jڃ>aooZnvzEBFd{oƖwz(x]g!5yW?/Kn缓w^7'/b2t"&oFip&E /O84j08I`StJO dRy`(d NlY%o @ &Ox-(f*c0/e:Bt?yڼt89Cm`T0:8G;3)Ȓ e0)W@օST Y+ Y92QJT ֗y`3N /@~X[x4E=G+ je·5WPHt@Gmu2Āg {B Ph0@(X*VTm] RC)#R"zC=@9#?%+-/،rg+=$R8fdR-޵q$B ;r߻ˀq}B鋬D"uHʱ"T8 ¡DENWU]]]ZjzJRf9oY2o.ec4b|R; :?&Shxs\9id5Y"*\-.I;}G?f¡>~-KQ“Ksd8QuAzh@bى=L]<̧ο4"Q%QihT]S7n)vUJO@m*.6MA He S'߷|TL!PY1p$Ĩ m>Y@Zsx:h] q]U^``Wx$̪TȆ1)>;V\ SQxQᆵu ]R%7E\@e] tPkR5|{ T9^f_~]5ƅ1{tsM7m"jh^ rձz9S*-[&ބ)mlmzGգXc;9ZJ`yTѝ=e |(u^ BW{VGlW#Z*hg6(ÔgOy|T`Ao-y233vHS1U:@xAkCJX#(aaͬK̞k_Ǘ?A[C8|Fkk^\ #4;_nQrQugRggtCۏfZ(@xg9"5|IX-sA9U|`VOMX)bA;q``m_y LOG[Wvzy4\qWSixIg_ގO#o( 54oj)w}~DU%exxa+4?,p-eµAu61tFmwj<;HjU'_ -yu>56Bގx#7$:+5D-g*⢧_R@]e?[[ 2vŏ% 9KvV^ $[I~ vU@J~s2 !F5Ycw,Y#%Cɟ~H#xDŽǃ[2rd%K(uZ~Jj2Z5P |h?,$f'L77'5lD<85 فnPV)I3E?)<0[- a狫^]'Ь:>ml~ˉo.//oNS-~y]ΥͿm~gr O71>, ePEq_]+"h~v2ݧscK% MVGXE)њjd`W]6z~҄yV%J(8*e9>— 4fdֱȍ¡+!t8ޖT:1!~s;qvw>ERv|ՙËulUSD#RÙ ZG'\ \J y6y 6w@OՂ@*MLڕPJm k MS0zE&^C@r>`"#tT FV#m=tL9DaqNStOj9Sv qEz>[}w#[* x(U:U g9r.j5p9a(+Y:b,\Ϲ1:P9(B(k*(fE׋8Yl`L~z~?"+ ctzQh}kv[۳O%iyMF U4:DYfQp?wi8gig) ,pn 7^PqS),¿]P,AP^Ј *$dz4 _?_O5/@Gq#A ~NbS( ZWc~ X!Ӯ- <~$`U@bj{qA3ZԧвTsIYyhjtipE] #.+ 8 CpTXxDETkҍ-F%-2ؤPL&O'ٹ}p/ 6~ǖJF9yN_]^n~Jn}..O..'Wa^URm)@-d-Lpm8 U U^PzXM-"Iە~] c rIugOo٧WR { 0^yJ$Нw"ZfM6!j3g#zS榢QRfJ ]ם'no<ƳÛn;h+5:G5ӽldnn^}kOHyEj$փ{=lOt{[{qۻAs\zw;Dm׏џ]]{w_BCJ >Cv-qg=s(^,,볧. _u H!E&9#5qK":NGbAȥ Y[V3EeMthjA`i,m9R\2F Et.$(bSq^"påODׂ@67Wh[qcl?xDpig%tR!D{nX 'D\Ps f }䁱fT r8b.`u\SުAhՙ tlMܣ7HT"E2Gh4u_=n[\XR2YB76Me˃> 2Kkī rq%VNOjG}.tuNik&i4WIuk$UnЌ4>(}>rLҜnS6\(Z4%YfGAFQ;YGPE;Lܧ˸$,=x%GOHJ3dE)!M޽uf(,z⩉");R Gmif-EP)Y`h&#Pz/s:k (򌑘[*t M+[Ԃ[5zOBЎht1ýw21SQs.A$3hNTe:,>*9b «ɳɗUA&ٻgNOUr0jC}Y:u=yz$Tk^YC˥5>%/Umc-4>\.F*C95j((hGPڱS@')Y"݋\K E]Gq,o*#cԵeҗbS.U{ɖ|4 K 5MN5 *K m LะdݻhzF.8S欐*(g3j<MOdmj?7'ٌ'2) O eJ gWjǓK,@5Ǐ|dgUtZgfHr8lSdf?u.%L*n/De'uRrh4\ !۰&<"Z"mF'ӯEnp)JbbC!{Igf < hk+IxYG-)C*ׅ:/ g6pt8 gza3hlEɫ"wy E yL%W9۝RYD:̧b&rѧ1sG 9OUl>ߝjƹ(59CrrT%x*&-?/RhQ\T:IޜLݲRAafۤ)F>Mn.eUү1M߽[*|?h:ƒ4~%(|y*'e\볥zG5w["ߖb'Rz}s6+V.+1$!/\DԮ){9\n!hTcn N+Uݵvk<Ѫڭ y"HMֺv}aڭ)UDun'OM5Sڐ.dJkMBz0f7g3Ulֽgp=EtwLmMބ,K^įIĦK. I㆐aI&̂5wDi=Uxx \(V嵋"h#BP%@ ^?ޥ!=*ZWDžX>-"\Gw(xT :3V OiaTdi!"Ԕ?떯?j꒚P?uڳ}kZ(ݕ(=GPk֕ x7+tg: ^ݕrZ%ţ,x!?^W, wlYJȟͱO7%~=N&m珖G +Ŕِ' Ȗ\&N?B\$9HIfCv̕ZN. #e]\!.r >mX,pU&pЋf9\( ggD]Tx67~>Z ډ"?%5u(fF$]b\#AeRDPcv),gS(L˃q>W*|gr(=֊A/޽Ģm )lD@631& jfTP-U&" 4撢r ڑM^(zkZ# ?]='mPܹB*[UL 3&AMwOue.sv.3x>RG`0iGQ;!YL%E'HLun-P+ E*Uk~Jz]sbkU:ӛchcej˦Mete@FB[O 1[O 1{0L]vg p'>~3Pͻ2.[@+傡Vq0x{ BywImK"HB@HWi/FN{'1a޻ߜ.;y; xP ȫДBxJ[Rs%B yx?mTI+ 8 p܌9k^ re~4Z'`/7满}'0S*fd'Ψسb QputK9^ɞϏΡcl sQŻġM}'Vv\Atm8^pULh}L=muL՞X NGN]&5ED6ϴ7a5LPZ&AR:\G RX9GJP6]P{lE ݈o'^:a?X:MAs#URJ1I)B"Kn9Yoc3D.Z'y;0xL!"$U G8Pc>2s ZDt2serߟ ~jsvK_~NjAP;2$`Fk@T!ꥂL)].~j}jP)|:P^'F-)2*L;*1,af=w^+LC8q0YPmJ-nZN5쯅j(-SMS]jj j[Rp^stK%kAeN eisg7~ͣ|_G,~ѥu1ߎLP'Rw+\W- o$BqV .+Ԡ"^92V)ڦuwK*j8:{_$K&/לjq.5Ld\*8f-"4`[PSUMOe)h.Rte)w.Z\_Ϭ|L)xGAͷB;R,wY)lWR\,q^-Ҏs}}Kje;d}:u1悋NtY{5 < m5:k"Ϊ-KqƷ/)EDW$z[R3. 
xүf%o`eXt'+=PM4sUВ#u=.PWH5r~碘`JI7ZuNB+8_j 9@o|U㮴3%Ttk@(Ul?+T=p*E;Hߐx<qưƊ{GWIݬxT-:ҌD,dž %2h$)t͐Cs68#+q']SE,owٯ՚\͒˂u L(_͜_x"{G^?TLϟ=5|O#\Ҝ*4SϯfɥG?CA,;'{q0Ǽe>YE[3~;/X;9VGB"pm 3Dl 1GcMfqmP`0#G3)XvU7&X vZ59֊M)H(㹧 S,s!2*dtT4,&XBӆ bIi-ZbF:s~Y1 dč"`C [f<b tE8)"Zqpb1{'-1C)# sDyk+ ¦ՂYx PTh["'Be?kunT`Rzpb N5SanV Fx)nũ ¤HP& $i%ku.9[ }2EEݵIT, dM^@h6b!A1VYX8nk e#\Սh5aK l 79˒_70z *2R ᖭq' B4 |HpBgԗ `Y62՟ͱZWxr3A #Nt=&j &.qټ>Ӻx}<5o{ghfmt&!\RmtTGi1JS +*4sZLMZSDITڧ "<_0}G' `&4@ -xj]}UҖ9ÉKܩV@)y_obր],g`l;cX+󒭷xMKOb'P[^O0>b9aЧ &O [CӝO|RDzZGGn.š'rBNst q7G;3%3՛bj -j3JѕL7E2CZ+>R9TI!iܸ9V~b!9+!WEܷ;I749b[i[7TII[>L (iH ګOɖcr!)f/K;#sB0W5߱XɈ)fFJ Y :)O:B C;d 01[c"aT-s`ϑs2*XD&t6P]}EN}W18}~x P G^YznƂϤ&(Ie )AiǬFvu1\ 8**I4jrm:-Ȃ$HdsP|&q Ra|b Ű^̪jpwH@3xAƄruT[V 47̗0nװrXC-A7UK\Iyi݌E֩jގk:-VK>NfMs/qd9p?wxm{>LƏ7O Vd53M}Q&t0.dڙ)B ʮqFE^|) ۻ81$e&Ǔh&3Rg~i[7eΌEu*VŪ"W8˝`z0_܅JkKFn[Vo`JzV8v^bd% y"$Sk(a#o]nYY Fl`Ng6QGtjqq9{5Y=ŏLW Ju_WR R^ J(%um'kڐg.˔dpv?*Rݧ'̫LRnIPk->M/O;;g>mjU;*y u覕 u)|ᛙEw [W%4)8GG1ƹ2vC]!b=mFlgqF{6ў b [v&;-ј&T6 e(!u;S[uzS')J!.O&ǡ} {ͷ?K&$3{s[+x35:RL_+tx{…XZ*rjC+;ՎblĘɭ&IȩF%[z !ZZ\׶Jf\3'ˉa:7O7x$ܣg=BZV`URh0͑ԄEJ$ " 7U>*q7⩥ȃlvQ]m/Ȓm/J^`<a%v :S$i#pP=% D Lc[΋yB]wEI+imA@0jŅ ԡ+6}=0J v{Ϩ1Dq_Mvre̬?ŽG5H۫k/(مNTd)RJȧB64~,O2)3_?>8T@!GGv:=&y*5ٸb9*u &Y7Wg~y1 BVʼ oi<( zA28?)RƟ2؁˙cH(U[6]t<ޅpz;J~_-eKw /)N 4"EF6 r#hԣTܝw:An'MBņǞ67Thmti(N<>򤹠?YQ Wy#,(ռ㴙 ӊ-U;O7ՊL} ()IWaΎN%tBPWM.mltm J!W'F֕z5),P(z{و㱴yn Jj.`P]t9o"8%H ɟ)Y]?%l9#rMNL6n͒ .w[^D,7AQ# s}&»c\Qޡp0F w8B:ttXK[Qb I:7e!h?ϳpw( 2IՖר [׈.5Fk<Q^(Swͼ#J->W/ǤD_ݘʽѽ3p7mUmQcZsۻ974( 1:e 4=O/س) TWߍ~([/6떼;p@Fe;=`k` R=[7wiܝP?f&, rB h"+m# ca)8 }pa % i a& "rJQI5.{;DCI[[*S+ ]PO 0AR'*a#.3HԼp`#l<RF=K"P RY>PT>ov:jʜWuXP hk1L-7J]2mNB|(c _4NnȉVo#I^ qaޓX9xꮬY(ɛ-$.B/N{F(hD㋗[5:^ H{vg0diU3({b:1M{_gB]wL5ݳ y""SfpKQ(9zjT bD'Mp|S/i`vkCB-SU1P0 s8I8Tg GhVMGvyK91OyuMITmɋ{/' 䘫6e©_Y5(䷮n`FY>?Y|YjqZLSMFݍA hY{ HU{)=;8puc7 6xJ7]B5ڥkZn@lZ1ܥUspX>' R:4yyh0IPƐ:./o_'u7m55ϊ舍{lPsf"../,Wh1"եfs:_dGcp>Gcp>&˵W !(/,g H92@ E ʩ^rq;(n0$WɻLٸ91I}ѫ: Tt'[M,_~j ck$NMBTNN_z,^Rxw/&0o N)QBwoJF|ܙ*jVf**$ }ۣDZtq7|wy#ƘW$%_4Еȏu}11Zys+oC{LToV[|cк$xܾi]>Zʻn,ŸCL__\WUw~, .˗M S4LR㗟l~5ͤeYg27Ǒc2F/fgý2[XqUN*=p^9Dq7Ob$NQM;~_ޠf\ SaDwx ZJ_zKݫ>bwVgo7\r8I(w־# >mV)[F H{|poo_.#G $KT"Yt{>B:kcׁ8wh\h^u_tqx0YlO;Oѽ3p7m?5(&J{4r64rӀ@IJϗӫ~k'|U9ů)QO;dRH.bBֲ?ki R8].B[D'<~Q9)T3zҡlV+\ jz7PV 9A}򮛜,T!RvfAy0aS:z C>:D@;G6GPHw1T KcurUD1 O s; nyJ ]x<]xE4INn\[b@햊A褾vJ\Otҫ#Fj6$䙋2%%&nBDZ t Zw?i_'i4y`}t'E7'MGNO߹մ!{x>d.l%<da\:1ѣ݁܆V/ӛJc%q!uW֬C˟&o˽"_7<}<&]Ŏnt8[ʞh|re)d*ǐayOc>O#UuJY&=0"S T~rL2C5̱]/!vwspoUyhCqҙ_l%k!AdxHWʴy.ǭ8T0K~bju~{/N (NȈ(#8TM.T3OE;N&;M[g:lO1 ]##v' 1q(_^ []4 }[Kڶx5N%P*[\ " 6D*"Su0;SgvU6aMTXl91; Ca5qHSoՕ9U8/,,rsSXc+oH3lYk^SF&?h)g~: NN9eݑ 6I:^{i?tE0f]DC9lEc!L 2۟I21I*d)v-2OIDwLC'Xgynyp\n>74"Ä?G]ϾxFUk?wu6wm~9ve,^drgMyI`3ڲײ' ~Ȗ,[ &_ZM+XU,֗~]7I2@(}*A5YM=œxGWߚ J-Т Kvӕ Q6w:g*=}y'-k{O[ XHɾH%CcD?Xg0B#a9L/cLz: "eGR9Aqv"_-&.ga?䣳=u'N~4PWL 2獶$kYgOвlksD ~ ;A8vHr{]G'Kdr$ Wۻ0ckVҭy9e}DnVJ+ч'm(4s[߼Efw1rX+-1эZ!AFᕲe7WWBB;J1S͐Ӕbn<1/l'.*B:Wr׀2P嘟>%S,JV`183^{)s($Ҝy )@MdI2{V^5yx-nmɍ2ѿk&aHfBbg@[c q(u4R"Lr8Em@)g` DA%r@,la33xMPHZʂ_iY\k0cxTA|/mw2%)`˹p\=ުߛANHiew\ ,JR_F,/Jo~wqpoШ crzwk۱g_fĸMĸM57CpDƂ+ePZE!=x]W &b1J83{(0zG?|_ޠ?8JUg,79e Et$kNѯ{.鮬;\/4?$Rh#vb1!\SoN aJ BJ8]Ry# -du$]* D);}Gb}-0 ##A2I؛ DShRxi@Y˔ɲݡ^Wx# ED{&>y(MIn#x0´x̍ +;>T4p#4-6K+K %8 -*T)=V1s G P 6B+zH:k6(G5h#nzW;'RG [Q ='{ Ёܲف{-lO^]t^gGB5aH|Mqrbg*S *i5B#0BiR"5JCĖ ߓI|"{d@{@v{5탄ΠHs}S`Ơ!3ȧ|LTOBx!cJid{B"e^?}ۤ>%=xܬY?z;RϑM<ԙWo'M_{p;|ˈ/:o?Kx2GZ\ 20i- Z>ѻMAZ)Żz^߻^%9\ˆu.6̟"wwc8+Q֚['_]yE*~Ά$xV<ڻkз|ߔs Y9#G&Ƀ+C]O64tkwcy HDA%)XepԘCNсNG+a"d6"8* ò ̀3G'򹀼DL)3Cs IJz۠[ %Vko[8`-ڧw}D,ǗJ1hq<Uҥ l)TlȕoJ~yrd]P~t쾧%Vߣ`P*GWKp$9p~(Yu%uEXx=鏪9-T~ʍSz䱵w ㌢-}+ׂ>Z[B\ҮYKw7骬 
ZF/q/wp6詠RJ;`*j<_;-)߷w^Kr/Ǐqc膫1Wݭ7}G_>\&@0qBc&/gXr mD;smz<ҰjcIRSIpE=rHB}GpWh F`A5 { C1ԧitV󽇉t!%CL-

/[0goj:b:[y^ L.]%oVFUmjqv|ڵLP m~1su(`G)Tm/2Y@: s)VXȍ$X7 ŏׯ[u2@H6T[)ھ8tmW@^psg`BS 2f\,DY͘Z^=zʚ_k__<-O Yn Z,]|en׵ v̘bF#-$fؔi1)MK&'<4^- 2tЊ9ki-˚ arM!<䳉)`!?}gS͟pK8o ;_Tq9 3.{Z 1`/(ۋ=M,Sz}~Boʿ703s2cF8VZ~ztXG21TV5/ e6EP Wgcr)2(%^?_0)"}„0T,됳xe-vBQe.m0s)!Bq8mq&3IbYYS)Sݺ+9~g2 ZdĀ/^ݡDw&C7Uw~an:5LvAsxMX9R6`vQŒ/rbm|m~ibgp/SKD9l) a@xApb!TӠ". ^@t Z?-rg:ZE%bJNvoOZG/R !7c!@b 1D "yi~CS (L»!XZA!x,R~4].ժNd@acpc.datR<*1Q1邋$a9W=Р[ % 5a앰=t[+Kj|8nCH'J+Do#aUp\ۀca&PxmQT.P|>#^w^;){a$Ϳ/ظ,z C=Qչ~.OFR;rȀuU}B>d&!!C.QwstlS|ԛ4D ܍!b( v-{ DcJxWr1K%B\a6Dl84[[Z.WL˸WLzx[Ue-84LZ^Ŏ2_Fe Cnk6PRY6P,$Dg3k iS.inҊX#O)y?ox_4r~:<<ͨPp^K]9s@s؟?|^G]r"O(܋sӬsCjZC3ASeHoUF1L>K?#D_{i}7Z"j$Q!E QT(Ep%bSh{4F5ilƃm} bV̔ 0[d1Srز񾊦̻mg5 fl<ף6>qDN1QtTTuČm/%Us˨ :7Cu!mn/59Cb;܉2:lc@9UˍזX0$ _Xa4"Ϩ)5 YTR0bD nQޕ<`ᖊ,%18g 0 %#-,89\UmrsnD810.$\BN3 3rUC5 Wj:AnR[標30pBڂAfZDujh =qD$RQɁGYS.s9 c'xЮ.HE"u\G[`Z+4w#L*5f60 +Gog.^ tYR: SHS^" y p%&hYd5%j%cդRs["m;)HȆe \y9BPQ8P EV!fLEsk կ\nv eD/VdJMWANnm̐/|)8% A`rL ck(8Hn{4w+R_[ݾH$BKh% UoaqD$k>v_IoY.2saF _t ,Ӡ!FzNkmr,U7ϳOypeYt8<%0f˹v³ҤOQɤWzsclG,:+7_sU/5Q-:w3^lERBȸSk04^dIpSْN iѱ@uC{4Z+uߒGwаsG";L"ӓ=vjkǨ'{ë|FKIe )KVu6i҈V`B,X5YFpQ7NȜ"F \[x[2i;50Fa# fT  aʗm&U'OwуQH͏q^l_wՈL-փ+^A]~MQnλm,/'Gط~:K~y0&EA~猌`!۝ok7g7q'L0[LiY۹`}G*'}HpupI\`w|UwN]RMK5dLⲝ.ҏi9[u|.PqpƘũVRf.qT`x61A(VI[p*Ϙ5&x&T+\[rö)5Tl3N]W2RwMU5© ELў~[тvC!hTcli^ jd޶v=Ѫڭ Etkh6zŏq;S}{:b!i3?_7:%mV)f˗ݾw#'sTlB<} 5ـ}c]7[5%̘:)33-߈=.f*R}^'"aLFzYv{G mRojbZ$D줪 ЎFj:qNyQD}y60{~ft3vn}yV|!:dK Sb`Lh Yc+H)<ޭ)H†QeZFV 5g*PU566!j/JXW3xaAĐۏ%\Z7wjtL;dy(VSUЎC# ZC4?8*IVޠTQ & /)Q@(8֑;]LtU^t ,R2ÔF_[.Пb3_Ƕ$ق\6Rۖq l :-O)p l!xK|9EJ*@ktkGF^h{D+ЖZ< CQS]܍/> }{.~15ݫ<;oN7d׌p*onOi}5N[خ)ߞp`R5}t`Jp۔ |ڶpT&RTZs <SAH LFfȥY#YޔUcjnx# k4:EQM.|Tdx2VbB.k4k"L.yokJY1q3i9 @?*:*cCfuQS"Ғ+͚.ba)%kD.Y\'QDwEZim\Ȟ|RTF7۶`VS-e^uhvq27qGNeEK^}Yk"s9/ƳzA$,,۴8Ueȧ%oBAVf<8; "ɠoJrN&hJA+4g}{hR#v*5frF!GQD0ŖONO&{%N0h'w '4j4\xoQwZ43GT^óO@X{0܉;iQ{t$-d ˔* 9 tNNo/]g+mSf_co#ʊ[1_zّmi_nɩs*Fsˑ]Tdt?|3(Oڴ.C K*AZϜͷYcr'7EDR<%0f˹v3`FdҀ+=-; _t@0]hмm*ۏy%,^ĘtDj1 42 HMA jA; ̔4'g L"ׂfBԦAFe"W8!sHpucRW$z= 7|Xk3=︗Z%{hص]eyg$U%V*nKT4A5p>$\b'\2T]2 32#H %MOMِ4@RLV #_ w{\OUO1Мp1Wrw MH6 kL'𾖰fZe'ng`DyOj h`>XހN2pih!]d§aG}^ 6\ot'ޏEpS o~|s% xyw/JG?_$+ly[F?_~~%7?N̕W]?ƞL Oܻ߹}Gϗ.8!5ӣ:͏ܜK:Sx ûW_K#r-kHVV.ieiml9xѣEv07 \<>ƲTt&0 'O] CukvJ .=dczwՕG%FPû vZp8lWgsㄞV"ʴlVS32T{&ִp}Nq])1gq'_IL#XIUk~蘜h<7o8C,#MX$LS#xJxHCփmj4e]Ukݲ `equ,-怊 $wTPAHa貴J֒b3 ̐̐)9*W~y?\O*%ő|M_N&龟:Wȏ|q_Uwf6xoھ%%%ORu%lHF~>AT{_҇y8eRޤ'B&K Mq%e0X *akBr/(^zq\&unwF<؉Wh!OZdօ-*swg]6Y)MϪd'/H\@hcR0>}>L%1 d0 &2OdEA8reһ֚b*jƇu/ME=MfmZnը""҂֐j^g{=mc7h«ciekϟ!'@J``! =5L~NCoamCxu?LoR3iFM?C[o:懟UoѲywU]cu@j]jbiO\Ԗ=ҴövNSi4NkA 9M'51hc:8ruFZ_VgE۲)^mjTeAEk }B1&61J~X9%yBL݃.аkVKVo3osYT[6Ӛ*Pûn>#\f7Uy<€9dPVaASȊlo$3 I IES QA n,~yb4`jCg-[HcigZ}E ImCR:Ks # 겝!`ghdU1_6MTJc5)RlP[bPO:*r3eA5T-O|V*!U]Lk 2vH.Lʄ".H$u҃Vt*+,08=Rx1ձʬ0=|I8nVNC/-JRG~A'!K =S䁙fVh_"ZGB9Q2)kWJMch:1UYANvBr7$JDlFX*3 NjcZK1G䂒W2#w "P2Ƅ^ '.d2h$ɏ u/Wtgp/ٕ^-RN٬kmXb`_ !`qr6AÞ̄s=8zX~M9SUu+f""`< OIM*ȇżdd,4O`Bc;!q:WD? 
B xV iM[kPpA^eah%]` 1gv [Mp\Ly˳dPyBnj: éX~J(|phh4y jf: Qe"q LFȆ >۲kd 񣃕SDZ N3C+aBADc&f)B&0kW;Mj@x(e*=cM:1'ɷ9kD#H6޵@>d @7i"`DBCg Fɩ#:PNtLDyϱ Vrbh,c>5)A&\#(DX#x@HbAhD2]@n(C|I ialE#$ږR6鏔hpu4mnПo12xӄ@`#OrN|OieW}ʂ\ jS Y\Gqy3*5υ r$Gqo ϨZyR\%;I _{|tZL"IfߞE$*>;$>)6t4K Np-b㨡]IRT)zWaeyc%Tx*VK*LL٩0U󵪻Sa 2Hw*Lj9vzpe֋Um#"hSdn[Tc  c}F_֋1F%|Gݶ|![/EP8mm׵bQ*!'˰7uN8 I"{M]nx8٩ Dp<İ74 {&ČKƅك:'i$d.s`V@_w3+}}vr:&/d;y"婲QuΨj6\6ݻ1zYt!,9;r#P]V;r tN1hUCf0<"F^!`DHZg I!4hJTcW i+8pLʞ9!Bx4{WpHf<^t:#WHӿ]h iP tCn1\PXA0!r,5PZ7Bn`(CFjDZd Lh$t A̽[ǩA`c/WA>$=rP\N !ڔ} ĵUkU\{MIf%7%AnJClYضq] #-KZ,/]l>vz⇋sM^ڤJ{6qsg)$MSzi$P);]p!?'gSͭ^M?}s@G ˵4C[Mтib}pX⓺Zt:>|8g+q_ F_te yǔ^NZέ!{@2w}7p)yDŽ!GR hC]Qڙ.(LW?]t糋iWhVS9w:JQJc:*:z0 bb)5^='HXU#ʉS Ķk ~ONݶ\btHcn08֘#bӝ<涭W`#9& sÏBN\]kKBh;{BiMo52 B p]p7 f%D1A2;xk"#",?S >{OAy^ ̏ҭWpx=kIIL^d O&W$bt ' ІQQacLLվV덟g?U nK6[:ϳ3팺].ݺpכ׳)=t2 ps7 MatQ4\52 BN:kw;Axc9)8x>$Dy(Qy#.ڟ5\4ä1Rzń);!5"Ya=%hf?fgIY<ܮ/n?8|t~Pyu%DDeP'\\PA}`saXyZNU! B3! H+yN*zB' S`̱hANSg7#xz,("6a&pa]Ay y@;wKo/rxJ>:V4#POKIsd\yhdI qR*+ iTmc 6 GH3T!=te~ ii) k絲˃dHı*o~\%M~n/$ع/$ |HBrƈ=!\Hxf”X^;TePW_-f<ƹU܁ϼ/VnLrw=a~1櫒zݺ vaӇ?1Yuq07{~}A;_LJ_Nft qBra&~~0̰6ܮڧT~}*0y m z\nSQvB>sͱ)\0յwۣѻbb:h[EVhnu[hMA|FCϙSJ;a$ȣH}Ca(4oR8aῡic;͛V=.AssJ]KWI=._6$y9HCCxtq >?aX#cսrWm~`oxƵZ#uP@IEJ:ORLE;ŌԕЩJWm ~YjhVT<6?BPvꋣ@R1/{}SJ34e J:e˶2RMUc 1/[ch N)!Z|b(*> Jj\J]Q,++p'CWỊ>Omz f( lEa׋q &6Sw>h Q- sPa63bP(AF84NsfT Œjɑs: G&hsH0)&7(ؾrNk86($DJA 2yK 56 ^ʬ! Ǽ,cD*%j/Fȇg__Y:4UHy=Gw@r=@48#zIG[HrN)JÎa WʗY^N^vdBINH5 |ti<,$W^/WqEY8HbQl/;&FȞx =%v+P+܆& `>@fkt/ЫEC$g\eW2푍'O5h kP e/CX[9yKyE[>%h32X$ޑ%ð/_J?NRՑw%>đu~uM"sp8y  2xj/IP BPY{/1L˘ aQ>ErOP J3G !ZHtbB$'|/!zeaaRF{J4RVR ODK,+>,%2kCoP 0P|RRqKNRHhb[hu[ᡞI 8y|OD8bZ4J.` hThJa2y 4NrE3!<Þ8{ě<=5\)I.%-tWlq|؏-Zr̟Mc:+ Ͼ~*;0n۱o~]`3=Ƃ7QGw3f܀ ʲ>c 9SudsB7_ͯO>  .x}<ށAuxVWw(2N'@RBe7޵57n#뿢KvBЮLRs2ݼdEX-;<?e[$Jxi)Ie24n4A}Z\0Pp,`nIr<˕=B3}jY/wƾ!u (^\w A} A!"c+fY}6NZ+ASU㾈jPQ9u %ύɛ9)deRhLe!I3r$\f -L 2^7T`HcH, ԌrHJrRHÎ12_^PX53Ԅa]8͝4B:Ѥ$R 3$lt3 :VǓc"!l lxt7LgNZ%% 4]!N%]lB5h;B)OozT-H!5Պ q'^ 9~n\h8ONxs~1K="h.p {яũǣ'%o|h1=?FЃ :[ r 5ȻGLqWvYJ%؞rj aEpچ}Di3YZ':SCw6i@~yc8NG[/M~}g.<ғ=5TB8 Iȑh-*쫇q]!}nu1sTn{JLnMHȑh-B{;yM1Vѩ;Gv)δ[ڭ 9r#StvCb":u稢#a3V?(P5!!G.{˔&\^7 QƙiD$HHD*ISM F%nb.m4syGz7Bۘ K=#;".-IE_3dZEaBZΦH+@EoG!TQA~CѸ~"!,F/hQ_yD] l7\Z>zϡmK [/Wj95-}߈*&6Ӯ*eŨֆo kmq4_7 bѦt*Fƹ z: [ƅdYRMST ym%YG);dT;*! +!}l,,J|D?kkU$iL֪@؍LZAN҃ݵ}*Gq »x~S|~=I>rTzq_4)7R3R4R-$3J")RPD Yޢ 1R -TR⌢648ƖXLS"(S%ahQ2@AIKS%&MjlIhb,qS,qce30c2Dd­J5X Zg M Mb,)#6U1uB#IQ:,IYJH´;롥9v[MXN~/#4kw+-TWaƆe"VIFKQ?dN`bÅM /\ PR-:!'Gzf&}K30w֊`&:Bnfsy(߼FںtZjRHlQQEF5L jjoy5paYibdBo'&, f:4bUl55TWѤXeb>L-Xu߮tPR%mD,'ɴW.fۨUNEl\kJ:$xQܽMՂY>zz5Mۮ^z'#h.^%P*ʰUu',#:D@'H e8+$my3X8dA!+̓/RXR2 @R~:(pI} 9b Io_DkVrڏ @^_ Ӝ"&)دdbuu 5Zv  lkĽT=3ϛ!V)eUx$߄4͘@TIDH+#` ij$JxsY,(ې˕m]vL|1k RFX JHL-4ZЈ"TiYx _ kA]W5a&HK+K@Sv1{)ǽoJ115k=5HlS{V(mo&fR3 ABfmŀJsU񝚬ۨW} K^c;}ygrg* bLfaT٬MU6*1[?ɕO'h/c>s&PVV8K@@ei:M|~Z1"[!{5Z~FXL#Ƕ"+ON8*ɬ[J H Q3UL6.KW`GKM4Nh5 ވl?P-;y蒪oNCm %s tgzZfI|hm6qv%XJ wtD4], DJ M1fU>mxK`m}r z)0}ڻC_8|iŔB//1ƛxpP};"pK̯x^=z| 2%OiϮ'}1Jln3'OD秈Ky#>/}e1qFƉj*aZX",! 
Hi#IyF D-i>r/_wNLG|Twчy<r?bF6-9G+[d~5ړ-wVi>0Z.|1E!ok)ǟ^KpY?-= }/kT!(t2' 2Y I:)mnFE/w=yTՌJTK\ Ψ⑽LE4*dܠ'-kdS][%tTR,S#`LYQEЄJIin2 7> TVЮj/91XKC B6`1mbӎ0B G 5` #pk"W9QpK݋kR;O"­ YF2T*7Da4^p RZ:;K!4`ZkL4-Y^\ ߭ 0[t,aE-^It͌(pV gB0&PB[Ai!AP-i'6l Q;1z*>ݖ(lKcb$< !7R$З`q+ )@ wp5e% @OL F GdA2Ԏ:> Uv#We{{F?w:$A[vezW%?,EC .~.>~V`QΗ?4t;v g~ݻr͉HJB^Vﹶk}{s>XG 20ݓO¹AB c͠\f Rzl>]"vqAQ@v@yɤYSq˥+5ƣ %C`)ko $z Ѥ+^c,fDe&(648F$LJ>"٠Øoaaz: ف]u6}: *8 U&"pi*{'gd( ƱSOb)P|z6S  5wd?D'(h}&K on2٤ˡ2] e*,$sJZ4䷗, ܂BF)rc w VOI- n)df V*y(hF)A3+M&rS9pT /^`@O0YaQx͋Lr]_p+2N$|j VCkV5&;O14R ~ k;o>Wɠ^}N@(?]>4:ۻ4X,Ȕ+wd<$3*#Nj3{QWXbLA!Zb)t:05 ZAc&cݙ Y8I*]*0 ik =\LZBF8Ud)^r*DPHŠhB3 22sup0,sN5H`PxϹʨ$m VҀ'+E9AµJPR+Ud\dL*9sM(.)y b b<$aTK@\j(XxMM"AR Ss.$h` Ld =G/)$6/ KȽع$KmnP Cãa0ZT5%gkV! Q7a"(aNĴѭD3Պ({-DL*W꤁$?x:V|WXsvD@)=gFI* 0Btu[B Ok $gBFހD5&XJ7@W\:@Xǵ7oݠR AWNDnѓ*sd6kAevvEջZk$6¬u-NsZ ;ٳQamF#k#M+!T1j Vk9~kԾthm{cs$P 4Rve1ܒ>lJ`G 9S#_;CJSK!eP?-w4ηVכ[e"_ +?O7mTT:M:U4wTstƀq`՗TXo>6 Քp8XC]AM4QkI©G "n`\L~6Չod~>hM׳׻_߿]2V~^ݍ___,=IB eW.UR_ ? 9wMW١[M+)6?ex# E4INnP9|L했A$>v;j/޵[:\D)FHX}}0/vw:'{1" } /6Ù骨TBmUԟ矞賕9S7{|usK]LRE.?[[ۛ1C/q?|ŎGk<#Vh]|맞O_W_5LV77s{2=@o޾cj?/Z0l7V9!psGG\=|1@7I zlм h=7$67 oC$칿[N)1s@`E[P&׍o{Wyтi!5 4ưj 6 gr`+)+cD j赿(x7CS/&N{8\D{X]>wX;8Ê=ਤ  |}p=<-81~t߁J$NԣNGB1XOfԯ}lYVۃ oJet$8a"!f\zIC긎%sK#Z!Y2k"Q0a06H6$/ UEr'97ָ Eۭc {vy'Msqn++47saTEyM!s*GB˰ },~?V2SNYߡTZ~f"Oy #GQ,g  Z:n-Q{-%Hȁ( OtPʐj[Ǫ/jtu YA@^ b v=h\jh8v5xEFWm8/wP !4nahx)Ssz62]hL-O5i7Pl S1gnGLpx0ڭ E`bBQJ;n:Qx*"/S0SJN(CaPžXфfhƐ^q 6Fg{UFvX' js-%X*]*V`2Mr2 ̵0p0^yHTב\UꠗYZ4ُ-?r~y{_|׳m3 Zh-^=h9e: g뙟/Qu?[OvV*Ïx[|s}!+՘oE | :m}b:`ۨ6]ݏF)QΒ(Ϸ~ee:JOM7&u1C4}E1-\wI:"J o4%)C#i>|XKOKYڹlp<c352.Z?L[-7q3l~6ώ Ў]c$v؃CT 4lE?'wXC:Y6$_I}mj {Dp[ȁ0Ņ8IB(*t*Ӥİg P*]ɘEl/qg\P)`֘bW#D|w&bZc>b1Z *jTQ%h\րL>V n:1z{Km #ֹ;<>A#^f蚙-c='D$4 Gxe$cNerS/Oց<@eu6wBiNֆ;)FL=v&eq[:n|%H̦"< ,v ɠ>)QN=ʍ3AQЃ HɄD3JFqu{tзZœZwm䬒ݰ6b,npVJ?߾raw}+Gr:E*vgZܸ"̗]'|_a8{g#/_$۞d^dYbK3z-vK=zjտ"UzO]ځ 2-YaQ2@=az~ŧ++~Mu` ګO,v4sc!>#1!.BG^b [DaCXd~o +:vGn,387<7|-̂^-ۛ87"W|hGof_p \}z"^Mp㛌JHP~carnt瘷in9W-\a&^H!!~Eq2^Ot}}4Yk#[Tnċ,|@W%uєQ1ioƈvPunf~Azc ͠{H[s$FRaNyQbb1ڬ05۞t2F |#wvku#_]T_"^T7n,/9(Jc)REo䚹* C׫h;ra/ :LcF|zQ<;zfuzLL/9X c×XINZ<}#w;}u(9~ju~8z~-NzintH nԁ@$1Ƕ6BQCNet49!zl\6'S1zZ#ǽ`At=)w.ÏKȠ(8^.m}@L;=:w!? mZR;ԧs@gOzh[pd)uh<5Jɏ=q4tżWdR/׺l$O_2CܙRm4i\oz[f7Q\3VQnјS\3Q欖*ϩ֔ҸRM~FmlXb\햓;ѫɲ)Xkz,??=>{$m95cQ֑f /*Wo7gq[4,x̵R{(x Tk *HT:㷟{mU9# rR+$)9LH85=6 Ԡ XemzɸR\ss'1cx',|R(Tޟj1۪n Ne%Y{ }&C#a.Ƣ"!zMלs'e-d)lzrCilO @,'i-HXc*t(Adž+!evWB,DpY1 yUIؙw4r~휎Y\ŝKv_nNbB/@ ݹ 9a=#C|*ODBnj&B.@E} ,6ӈV߈dNЛ \Ze*u+tXV04XG_ia>@ |Da> FN#&a>.0(h ",զdZ4PVVpUUscP;QVOk@y˜t(WQJ*;%8nQŽ\TrPIFnA[FB-xrz7wW+lv4ݗ|H,<-Z/KOOBWH(X ć{k6eox]=\e/\W a h~Gǟߜ؜͗,x_%>OiQΥq%J?__-,`ӷ'WsE"Rs8HߥAQJ!P_KՕ,n],a"7{NJc|j^$c5npGb![bӜNIAh5bAYKNU5,qT7eIi,h܍Z\lZmi]^Q/\J*P㥔$ JSɌ$*N>BD 0ٖ0^4ʗx:[JJZbc< nJR9e*Ei9td(ji-e;2!0D[n+ ' ]e:]s(sH4T2@2d\ܱZP7.v(%(tS8n|s h!sst߮.(7I6\|EcJ]\ wqKp1@]Ugѥ[V azXƇ;K?D!{{y§9[|3NGCDnno?Ir1cAפo~:*;//4 (YR?}Z͉иBKO+ƜX1V0Վơ>odהFOOn#g} A=XM|6oR̢e1]io9*9V &\<3fF-%=ZsAFO:;pT z/ftҭd Dp94ef?a[N }7L^9UV$vim|1 QZO܇w14`2ؠ@8hKhMVRqC8$|4i7%GSw;qn e0M2Ҙsi{F]hu͵?J&BO(zਸ਼~5-[H,W[d뿅-yƴ}Bz۠ha(,"**Nm KRTJiyl Rf&E PKtngsI Ē_ ?3NCHD/KB0d%Y<0KNG B| F-9̅&ƨ$@~3S~:(y Vga9pz nA_1DV(2~[Ve4T~S9{fP'5.Nko]' tQ"Dk͒D,(H}[< }K PZ%= зtf6{4>>#j|܉ )f]:iv-.E`Ȯ`!/syP$= wGp xˑ r`6i!O Hh@7\Ud҅V)C~ݔxx' 3geU(I»eC$Aޭk«](fDzb[۝k[͂å=2ڣ=p=P! 
@hsh9'i(ٓxkw;}y:p ]et7X׻WbdwH BAiC$G/M%+PR| NeTLsJXTJ!(DyLbn791wa&+4?sb (!!в 'k6s~I1&}O#aVK*#a{mqwU#tFWR 4T9kR2ut9L#13sn{2\beuNg|fDyi-*H<̠8e OU3*S(7fs\X+ۅ˛+dlKk'^!DžA+ITQJE )p(QQFvzc(^SſB6"=#!+C'kTP 1PP9fRU)vTb`F/ַ!띰C1*^3kHI%gc疅`*IɴPD֬z3w,T+S0GJ+;2_sV-U%b@􊪑/Q,\/ W{j NR=[qOm`Pn&a|_X(a;6Ԍ4z e@@i{lOMR~oֽ6߳1٫MJo?α<Ы8 w?]GcGjRfᎭmEG^5Z}?\@cPS=vB V.Z!B%TX Mft eE8D6á[J>V7D3J%N-2j:տQ-$'fDE>1s>1slZ[É`+NhYEVmUfAP]Ob΁IɭEs'j-;ZEu?4pҳF!JBCK'\̀rIMO[subq> v jhth= ܑ%[If5f ɏʌnoaFXuwI7\ƌ]XWvy&X^%W3#WƳشI/hԛFc3vj!S*{TV}n$p-!Vbf2B#u>03ޫ/Y_@cKhĩ{B+,Jf3 g3<7k#[+e5M!5.[rƏ<+~mIlhe0DB!rd|i(Sצʚ;X1Wruqځ<2b+|R? Rts3X5XyW2oO2|} x'SU I3-sZHEV+WtŘ3MkS2 G./m _,eJ6a 45]07=,7d4t"遠$O9: +t(J[,ƌE**p%U3ZޓD\Zh"PqWWҁ4%:8՚<)Б8bgKBD(numΖ9Xj%u"F7ypL+d]! 6]!+:R.U)'(GR[e!xgƭp,=_'s|_˚VNƎ!`*%A /)A*h4;0>zZx!R?@#)[X- $Aa3 *^TR&*E^T(F67`*N@6Xo1}gR%Q+ce{$2Pm|ۻoHf\!wo|qʱgpgͶ[o7gxK#}]kwb6AFS(pn(gK{7qyp=;Қ0!FfO8V|m7JjVjx"DIBޥAt߃ pt)jf߲{pMo]~6]~+] ō":sʹWY[!*=khQL;&2 Z9!emko eyޫeY\U=;n=[O-tN- ;4`&R>P&%tl!)5=ޯn|zp֓UC|FizVkncyruC+R =gNZZ6aauުn#l =luJc^1` h"U +3';Q2[rDg1q e6*P;Tӛ-*eI0.~quՃ74a܇HWv2oSmnWc?7^NmɌO}ꭺ.Gq E 5/Q0FT)%z qնW`V|;FSD`o  :] &9Ɩɢ+)a-t_c:ufIM:r,Mowh|tmaFiRe6.ZgS3)" wWm֢mSͥIgN*NéVakuц1TӜ5Y0*I4YWʝX|'[ TgћV̭3 @JiZrw gG㽪oׄRN7ztHt^Tſ@>Vr}f=JAtߕ륔C6;&m[hAR`(ϜPk(|a57 ^̢*c+D9/ڍK~ M2ߟ~lO!ƞeB٢eiL5I 8#TYY/zƮ͢vj"p']S=̊dw {Ϟu\'>da͈7ЃƃFmA`܂Z쯷m)wo &tQ&[/*%kyk=PʫEF]!MoVu˓%뾣wn)[`s*@1cXcLϥ z8#[@V׳*ĘSe ֫?B[E<2IU#`CjѽO>I$QDJTGɉ0"-xOTL"zbDT9)d^hRVuCJ@{|U-\`2Ϥ]3bD G 0J^KQrް0J$pQ#=JYJ,!\SZYzOlPƘQљh锟+5(];bU;! Jj.YDPǏDP| oPyWS]LLQ»~>ɊSFWmV~@FA!(A4XatZru> Oe"-#/=.r4$:˔"^n b@1\]妯yx1׆=QKZI-h^$j(Rg0*A[bm'%ik D!-u&v$ ZHkkӀFXb:]\+o^$}DF hɊ@-Ѻ+ R@hw6ŊdUqc0o:cdĨ5*z]mz"%GF}6O~[?LQGfB|Mua1_x| |Z>k'=~$Fw8ؕԚV$/RT#]3Ow֓)[V]Ds>y~OiA:爲&c3'L n8R9 ,b_)oZl?P {5J)J[Y!B ZۀQ}s"5[#%~mN,,3iyԕQ}EGb}!TZ+)GttV#xFYot£m:A%wr+.8gV)Dڨ2RT8?S."SDA,J"8UlN*$\ByGChfvQDW^`>R󮝊u-pe/5iӋYƬLƬ٘mFE3zy;vXC*ƗuznEBg {ɀ-&2n-ޭKIn^D8YВj(䎹Ć3@Uw_Ƕ"괊ā۳&u⟿bb:hvTQʟC73Z>m&"ľ /OjTU,?E|y V*_@`Ax6.=2xQJNآy eɹUU۶^M%N>|,$*F\d'lR% ^bG<ޒ?@OUg(]6\5^7@t]K(dDKDj˞RRh/5U+EA͜M+* uFnHyCis^%bu""C3m !!ʦ6[ZAhÉe@j% {B)B{ސ6HM |[>n(JC!4E\ũ[!^Ѱh][ 7+fl-?TGgB?e,VIsc'>5:PYx|!G7C*n <`?V{1y.!%ٺB< 'wwt#ctna|~Wu|Ocw3yO&) S9 S|j0mH' J)kDԗ 1 P1)Wm@\^$N5nY>dŷPp i|k ^I㗪LV`)GaJp Q%jpd;>j Ӧu%w>3T=i9N7(DֶQv=vhJir(I'-}XƮIylњHOŏ؅wf1~KKLjI^]+Euw}Mb X8鴮zƘ -"uQc 0y5}pBDш' ;|q#VE/1x />h;ƒ4>E7f6? tLyϔR^ Kyٽ[6'M}ifq{?`)=sa^aD# Ehʍ ٷy)GEƳC?=\UoC)Db5<Ȥ ĊVg/PA;?AB.% zrC'(V~x^<zj\w#j {F_/?Ŏ_XٷI˧z2MTI3Et}WW JN_*gO6JSyURw& +PO:Q~$lSϜjV)d-'9cR%8&~Pў~򅃨˚-Yt$!MԂzNvLtOkl>5-9W?1IRɆ0 HRdg׊j0wnaކlN8\nA @ѽgS3;]b~ 2?y(K^x/2wD U =c<<rؐ3JmwT^M8eA.GxKaN/+HO+,vԦ\(CXLvO4P -%cK^y֠K ф)SySo.43Պ]nX @K/UÐԌB8zɅ8 yMޜ䤟rW[t lJPJs"dTcϧ(ȷZ&RFՄNARMZ^܌/l%2j=6S\X.GSE&bDg=[w Zz{jۻ7`h~%yFC"L>WpDBjT+3TH]Ԏt"SqjOez"My8pŤ=c\:^I]'YݧʤRkPݴW- XjJ'$6RI%SBʖ%S%%P1A1fIw!u/1j3. Cҭ1xDt႐ H$Aǧ*J C jdV˽fs$>i10 L)t;a&x u8 \N>cwa<7iMu$(c1IxѸ|n0 9ZyⶐQF$`p.ׯrQfRDnmHjFz,-YBR)G!It(=VjAc^-'n17o3Ki8?=j'T<"LAܖ@8@fBTqY$+L u,dg w? {P5Xat(ʨ0Qޠqq0(bT#y퍰L)]Ys#7+ LR}tnol_vBQ5kD힉(R$xb%d}Ly8ZQM^k[o8a\8E h0GlA"0?b^"u3B*V26RB?/ZD՟F|yҌ S?? 9c-aE9hMh1y%b_=]w~&0"ŵG[nT3=sqRd'ѐNXI4 r*ʕRm VU RZ/b*2ұ(:{m=dOvt{ 8v5a" a Hdb8Y924Xks&~; DZ i#* =eڰ㈁T[5H*R]J6"Y#Rl]Hj"Q u @BϑfSfVS%8@R&]ZRLi$J(Ŕܞb< Ŕ8\kb#Tc}5%RLUA䂁Z_:Sv9\ |*i rAZBii{. 
i1e6kEZ :$-ŠI Fy@# !b]y\sG̸DBvuv~YǦT7Erxw9O.h g| v =d6CyAczIQ{#fj IoʚzCFzb>LcɸCauG63uŖjVZ ɏzC3OwhdC6<4O̻hQ̻hf=r}4o|f3naƭAɭsk9 %(G..{v2ZbߐLxuw罳y6 7RLAqhf76k$?N%Nj= v;Y޹ɅRrP8Gg& }P BYqw^bu1l[<-ST9!utzCYZEOu.i&%-GgN-'!|dԜ]!U !T*L SsHzG`琡" d ic\Qi*YURpbK~stJ戼((AG\q<Հ7!#7X+Pٮ Ҙ |>:TD˙4ɤ]}x٫uF a0noSsT'+I2 .6n2~=\B[ؕqV+RqJsw$OΤ %<)xҜM(zzؐGȆ(z@㗔-X2~ၼ Niޤ իq)ծ1''~rE0]rki EتyvIl~&x93_?> {/K.R y>)Ri|N8j9iz:~}߁j|E*:xJ5LWvm$K=sTǚ$w.`0c(UF{Q?>^=>xOm¿Ӎ4»P1ś3EmC9fzw ד`B8 ]Xf˺> ^Ÿ>=5#WϧuqxؙILdY$䕋hL(C=n2puBVAD "T޴[yMLֆr-)цv$)pBVADNxJ]kLֆr-$o(ڍAnNMŀ"pZnmH+ѣeJ QM3;nA ؂E3&XP%U^sG8v<''=%%a9DcA2%l!5JY?@,T*8TxlF),ϥH'LdE\}U,i&E)|rsmc'SO4櫘|ӘbU<v #6bk|qa6׃Ѯ]kR@2 9m|L]Ojiq|w>OLJOߣ?8--?TOgNjks1" IVgǿ<($ƨ˺jIΣɥDb̆ן/}P{|YQ35P[LxCĖpe7I9T! T{Cwس1p!q_{l1k$or{bMZՀ=|h_?e<asa.5|M^cdWM^2x 6Ä6ttW;p`WNѰQTp-wkB~ ;YA9Nԗȓ_j5(}:pt9 c*]WÉt*Y ;NV:Ԇ%@,_ P$b2h6PJ2*Dc^ъRJ\NQ25)ڨC8P-/Ri% e1{̰qerűqU4Jo$g™ kRQ" x ބp 2 gA֒`+b:8CT DQ s4)rBmLH,҆r-[R' [췔x0)QR!RsMTIMhmCB^/SK}7;Ny%1b ׫u*1FDЬ]@ 7}ҿɌmə宄2~4P+nsWaI]J:nMb)KMÓZ4b[RQx tiD&[&밓ObX1_Ms# TjѱRƏqVTܢџі区5w Uk\/ CD NWU9,w1l&ZبL?L҉{иc{ , HAkRUpM .7#Дv^,F7{}cL/WqԢ#|ۊM8Loi߭n֛Z(&/%:yŧoJ!pH՞ތY`in?}û`4=G餠R߷A^3*<3_YMjj?%>#ޡQGWU\Wq^U{ڴ|T#: ;(gRNVB/Zﰕ R> C;uw5҉IN~ЂKL" V 1LR9WV#U:0c@+d`HJ|%p 72i%^Z.($ A D<q i&r3\YUėũJ0r*]%XIQf:fL&KW'vo4 dpp_v1nNJ*Nrz_1I2פ 3'$I 7O4+dt 49v!(S]8B* |b1aGJZfiRGej:Vi2UB gIäpX*FCp2jR*ňz+4Z= xg\My~4`=͔^?/IMA??KC|dG0<#mPε A\~]ScT>}̳v*hR;Fp}߲*1/4v!1k+ $JN 5S:QQLk3嘟 Ȇrj1r\!` a,6Tab9XRb@ʑ`*`r) |+ ^$|WD$9V]݀n@֔6x@Ll#'2Jc̨ %HĂ6XdJ8GDj:4rwj19[?jtkjhJ|L~PIZeyeY'u-릭}G0uŝcMƆ;frcPۊ;FǕJ2]\x[ $$ VsU)(PrKK$/D Qplj?ov@ %EbCJW4*~+A I=ՊBPb  ,嗆\ B;e'\ 'Ngi !S yJ//+0YuUSvǿ&ióxъ+g†^};M X3Ƀ-@^:[_Ý[LI&F6Ay;3.zD_DiG͑\aFZnqd1Rf{ۀT?NÐDID3g;Aw۠VTr]7{9t3E|w '޾@QL>ۖ]:dX%uMY cАZžeA~tȠ*U]x߈) U>à&7Mm /xm/} mޑ̼ mùgd/G5s;YX<\<r̛1orbMN<fh?IbƤn'?MO D;|Ѱz38XyΏ :bNzX)8WvC4qgڳ1}kDee&/9bّOqPZ m>VB#gjhKԩ"PAڱ6ReRb=Uz9#쨇@Pt.J0[IUVZ1V `PXü ˘\;b'>~R.n;,8=˱7d)۟݁=g=s DnZˢٽYCL4&]ΞbzIzPRIzug(Hƕh@CTtS]U¨}|;7XQH>uMAi&`,wy­tu qfRi%`se*t]XaUZ-!|SoϘqH <-&j uQᮞ*'=/ypQxLSJ/ymj]ɳKt?)X{B%aOHRHBu[Ni=Asa l r:bLS=z櫚*s%B"; bj bMΩ"Ern (h XXWU5BʖW"B[]|TYރ/L?ՕJ-gl\+5`auhRqf*W"UiI & Ue0j NOdRHY2 EhE%QT+Y1Q0=bKZ1Sdr5"fdldY?9Fw?z%/F(c1h(E[i" 0!b::~zR>K8\W2\ؕccȐ ٝ|(3_̤4łuy@,?\dy!QׅdL1" {5: LsBx tӽdnрHWESI$xk= zXj!OuzрbnT4Ȯ]"ęCbcN-}{!uW ~3au I4۟) /-)&!iw(p*Yi_TN& 6PoDϟω;fV;*[ rxw:Q` S=;H [)NѾk .bmD= [і/GYȊɮ;Q؜]鏑]yW B“ܙ}2ewf<(m^uδ> u iGo ) D !5 # a\8 XQB8+we8}|*eR7B`F%7Dqf4EhO^ƋJi MV[%'/%e8s0KO|ڲPD4K9UCM*aPgfJ᮫g2mSq"$O댇 !  r)pNwj5oTF?rC'F?~ƗKO㨘OoG<-mo.BdOV S;j%RN(zr:sZ۴"` ]}sIMloX 5ՕBζǖV6&+y$iTTa`~کGVHDZrAAOҸC(2L [ yk0j6ž(/@?4HQ9\dN1hTI<;X z}/*8_q`d;c_')jWbop; kpYξv_#E.HchHZQגqyxxB hxrfҬ]F!꟏zW}m/-7BoѨ9o ~me'| p:Y PE`lL!Y#A3,y!H8D|CeԺ|,~x;0OF_nk@oG!~1bLF -1G%~򢕚Vos(qHHEj.F^Z6p\:&0&ս8хP@NhΥ7HR"LBHI)rn 3Ȩ{)u:{I!Q"4f&I^)Mr M PF:GIäY\]LolJȾY}0yE jyqiv&OH|-e,D9V9APHFUdHBKg, {cGh`s?ccRx~L TئUUKfr #dL0LalneJ.:c mE`]ZK#`p cC'T;o/ɽ,m V#=HwNIűOld1C,w[e5'p g))X`E@ jykc#/TX#3=XqIc<ǘ,7Vyp9MMv0bf| btx\SB1^֨5R,k9,HAe0Tb ;$O*v]JuH_s` osO\J}7_P+^||xE偕qή?`. 
10644ms (10:54:29.359) Jan 30 10:54:29 crc kubenswrapper[4869]: Trace[141734602]: [10.644222079s] [10.644222079s] END Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.359643 4869 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 30 10:54:29 crc kubenswrapper[4869]: E0130 10:54:29.359737 4869 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.360793 4869 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.386120 4869 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.413113 4869 csr.go:261] certificate signing request csr-fqn7z is approved, waiting to be issued Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.426409 4869 csr.go:257] certificate signing request csr-fqn7z is issued Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.466846 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.473268 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 10:54:29 crc kubenswrapper[4869]: I0130 10:54:29.928827 4869 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 30 10:54:29 crc kubenswrapper[4869]: W0130 10:54:29.929408 4869 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of 
*v1.Node ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Jan 30 10:54:29 crc kubenswrapper[4869]: E0130 10:54:29.929490 4869 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/events\": read tcp 38.102.83.246:51238->38.102.83.246:6443: use of closed network connection" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.188f7cd923a7933e openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 10:54:10.627826494 +0000 UTC m=+1.177702570,LastTimestamp:2026-01-30 10:54:10.627826494 +0000 UTC m=+1.177702570,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.068658 4869 apiserver.go:52] "Watching apiserver" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.071747 4869 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.072138 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.072539 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.072658 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.072738 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.072793 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.072816 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.072848 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.072868 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.072903 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.073531 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.114855 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 02:16:58.419231702 +0000 UTC Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.115905 4869 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128169 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128188 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128287 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128321 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128358 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128440 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128535 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.128541 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 30 
10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.132408 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.147756 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10
:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.167650 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168057 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168160 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168305 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168403 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168414 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168475 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168496 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168523 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168548 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168572 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168598 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168623 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168618 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168650 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168783 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168822 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168845 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168854 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168904 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168967 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.168990 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169030 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169051 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169122 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169048 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169147 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169142 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169175 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169235 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169261 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169286 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169318 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169347 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169366 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169370 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169436 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169446 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169512 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169533 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169557 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169578 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169599 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169620 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169641 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169661 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169679 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169698 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169743 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169761 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169783 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169803 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169826 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169843 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169858 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169878 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169899 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169919 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169938 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169988 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170009 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170028 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170048 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170064 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170083 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170100 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170117 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170136 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170177 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170229 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170247 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170267 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170283 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170299 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170319 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170342 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170387 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170402 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170421 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170473 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170494 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170515 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170533 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170583 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170601 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 
10:54:30.170619 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170638 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170677 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170695 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170728 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170744 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170763 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170782 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170801 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: 
\"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169593 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169789 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169859 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169987 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170120 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170197 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.169921 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170907 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170381 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170753 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170792 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171257 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171277 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171305 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171407 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171512 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171600 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171699 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171771 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171834 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.171967 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172050 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172009 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172107 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172155 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172520 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172615 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172697 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172761 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.172887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.173185 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.170822 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.173906 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174010 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174034 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174035 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174054 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174092 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174115 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174135 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174218 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174299 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174386 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174411 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174457 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174484 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174503 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174522 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174543 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174563 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174581 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174604 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174623 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174642 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174647 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174664 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174689 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174725 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174742 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174930 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174952 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174973 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174992 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175009 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175030 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175048 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175066 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175083 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175104 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175126 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175149 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175178 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175197 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175218 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.174954 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175070 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175136 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.175278 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:54:30.675225887 +0000 UTC m=+21.225101953 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179456 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179488 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179513 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179539 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179572 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179592 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179618 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179639 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179660 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179660 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179681 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179733 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179757 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179781 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179807 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179773 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180008 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180033 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180064 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180087 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175355 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175376 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175443 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175560 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175632 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.175669 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.176223 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.176238 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.176402 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.176560 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.176763 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180201 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180214 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180302 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180673 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180697 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180752 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180773 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180791 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180857 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180881 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180899 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") 
pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180918 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180940 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180966 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181035 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181055 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181079 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181098 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181124 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181150 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181170 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181188 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181206 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181223 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181241 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181258 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181279 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181298 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181315 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181333 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181352 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181401 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181426 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181443 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181461 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181479 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181494 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181512 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181532 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181548 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181567 4869 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181588 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181606 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181627 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181645 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181668 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181687 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181716 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181735 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181754 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181771 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181790 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181807 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181826 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181882 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181912 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181934 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181956 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182004 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182031 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182051 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182074 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182092 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182133 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182151 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.176909 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.176950 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.177015 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.177382 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.177476 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.177601 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.177850 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178104 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178287 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178381 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178388 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178484 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178539 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178875 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178814 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178946 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178978 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179068 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179130 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179262 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.179284 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.178882 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180544 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180755 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.180785 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181335 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181370 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181755 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.181827 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182071 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182111 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182131 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182161 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182450 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182620 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182699 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.189832 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.185844 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.186951 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.189950 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182735 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.182943 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.183347 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.183582 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.188654 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.183186 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.183161 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.183486 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.183480 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.185019 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.185453 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.185718 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.186401 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.188086 4869 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.190050 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.186564 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.186588 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.187110 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.189069 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.187188 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.190104 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.190131 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.190235 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.190293 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.190540 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:30.690507594 +0000 UTC m=+21.240383830 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.190620 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.190683 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191141 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.190766 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:30.690742571 +0000 UTC m=+21.240618637 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.190839 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191262 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191292 4869 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191318 4869 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191337 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191355 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191372 4869 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191395 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191410 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191426 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191442 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191458 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191475 4869 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191492 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191507 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191521 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191536 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191551 4869 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191566 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191582 4869 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191598 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191612 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191624 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191808 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191822 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.191830 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192069 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192087 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192103 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192117 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192131 4869 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192146 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192161 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192166 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192177 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192195 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192209 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192222 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192236 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192252 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192267 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192282 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192300 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192315 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192329 4869 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192343 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192877 4869 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") 
on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192910 4869 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192925 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192939 4869 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192955 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192971 4869 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.192984 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193001 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193015 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193028 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193042 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193055 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193069 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193083 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on 
node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193099 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193115 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193129 4869 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193143 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193156 4869 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193169 4869 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193182 4869 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193196 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193211 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193225 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193238 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193252 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193265 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc 
kubenswrapper[4869]: I0130 10:54:30.193278 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193291 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193304 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193318 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193332 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193346 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193359 4869 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193371 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193384 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193396 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193408 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193419 4869 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193433 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc 
kubenswrapper[4869]: I0130 10:54:30.193448 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193462 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193475 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193506 4869 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193519 4869 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193532 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193545 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193561 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193574 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193587 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193600 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193612 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193625 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193664 4869 reconciler_common.go:293] 
"Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193679 4869 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193692 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193721 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193736 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193750 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193763 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193776 4869 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193789 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193805 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193818 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193832 4869 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.193848 4869 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 
10:54:30.198744 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.208941 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.209007 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.209285 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.209385 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.210359 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.210417 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.210458 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.210556 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:30.710531437 +0000 UTC m=+21.260407503 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.210753 4869 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.211645 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.211682 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.211697 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.211788 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:30.711766332 +0000 UTC m=+21.261642388 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.212688 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.212989 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.214429 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.214788 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.214966 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.215084 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.216670 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.216958 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.222856 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.228113 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.232033 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.232223 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.233109 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.234390 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.234856 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.234884 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.235108 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.234934 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.235624 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.235742 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.238932 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.238963 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.239114 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.242468 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.240316 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.242040 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.242415 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.243363 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.243470 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.243731 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.243772 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.243869 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.244799 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.244101 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.244345 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.244452 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.244526 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.244539 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.245303 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.245404 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.247271 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.259998 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.260014 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.260913 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.261334 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.261893 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.266510 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.266725 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.266774 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.267619 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.267911 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.268052 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.269597 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270305 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5" exitCode=255
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270404 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270501 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270619 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270818 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270981 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270999 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.271015 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.271018 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.271046 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.271139 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.271888 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.271942 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.272186 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.272356 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.272415 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.272522 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.272572 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.272738 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.272888 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.270549 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5"}
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.273632 4869 scope.go:117] "RemoveContainer" containerID="9c27837499dde2cab3a368980b23ff5ef72b883b7f7b313fc6f2dc49d3d1627f"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.294550 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.294700 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.294826 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.294892 4869 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.294951 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295074 4869 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295132 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295197 4869 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295252 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295309 4869 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295386 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295448 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295504 4869 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295559 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295618 4869 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295671 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295753 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295834 4869 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295900 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.295972 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296041 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296094 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296149 4869 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296207 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296261 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296310 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296364 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296416 4869 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296466 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.296518 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297537 4869 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297645 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297703 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297800 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297856 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297917 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297972 4869 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298035 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298091 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298142 4869 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298203 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298291 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298215 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298356 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298435 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298449 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298463 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298475 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298486 4869 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298498 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298510 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298523 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298541 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298552 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298563 4869 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298573 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298584 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298595 4869 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298605 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298615 4869 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298626 4869 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298638 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298647 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298659 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298669 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298679 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298690 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298701 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298729 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298739 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298750 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298760 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298771 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298783 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298796 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298810 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298820 4869 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298831 4869 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298842 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298887 4869 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298901 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298914 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298931 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298944 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.298955 4869 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.297755 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.301911 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.303577 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.309660 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.319075 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.332444 4869 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.356147 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.369635 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.384331 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.396947 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.399384 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.399429 4869 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.399441 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.414932 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.425212 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.428189 4869 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-30 10:49:29 +0000 UTC, rotation deadline is 2026-12-08 01:09:22.133113868 +0000 UTC Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.428300 4869 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7478h14m51.704818603s for next certificate rotation Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.432936 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.433050 4869 scope.go:117] "RemoveContainer" containerID="888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.433259 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.438144 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.440970 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resource
s\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.447315 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.454769 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.457319 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.471101 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.480523 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: W0130 10:54:30.485525 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-ab4becb9a340d0fafb8e06973a842b01584d1d4cbb58d2f7c0b9e2e537536173 WatchSource:0}: Error finding container ab4becb9a340d0fafb8e06973a842b01584d1d4cbb58d2f7c0b9e2e537536173: Status 404 returned error can't find the container with id ab4becb9a340d0fafb8e06973a842b01584d1d4cbb58d2f7c0b9e2e537536173 Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.491196 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.502087 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.524052 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.701693 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.701811 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:54:31.701783218 +0000 UTC m=+22.251659274 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.702097 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.702175 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.702257 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.702314 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:31.702305193 +0000 UTC m=+22.252181259 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.702394 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.702552 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:31.702518569 +0000 UTC m=+22.252394765 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.802928 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:30 crc kubenswrapper[4869]: I0130 10:54:30.803016 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803183 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803214 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803231 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803307 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:31.803282071 +0000 UTC m=+22.353158137 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803183 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803349 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803358 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:30 crc kubenswrapper[4869]: E0130 10:54:30.803386 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:31.803377504 +0000 UTC m=+22.353253570 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.083294 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-99lr2"] Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.083690 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.085738 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.085785 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.086391 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.086497 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.086894 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.097980 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-5jpbv"] Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.098442 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.098592 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-6fqgt"] Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.099472 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-4dlfn"] Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.099764 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.100205 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.100574 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.102110 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.102272 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.102357 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvdq"] Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.102377 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.102501 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.102554 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.102656 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.103091 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.104220 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.104286 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.107773 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.108117 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.108784 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.115018 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 02:56:50.289277939 +0000 UTC Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.115079 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.115558 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.115587 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.115676 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.115774 4869 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.115791 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.128267 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.132034 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.132153 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.140765 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.155122 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://9c27837499dde2cab3a368980b23ff5ef72b883b7f7b313fc6f2dc49d3d1627f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:23Z\\\",\\\"message\\\":\\\"W0130 10:54:13.244306 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 10:54:13.245599 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769770453 cert, and key in /tmp/serving-cert-2062903219/serving-signer.crt, /tmp/serving-cert-2062903219/serving-signer.key\\\\nI0130 10:54:13.415763 1 observer_polling.go:159] Starting file observer\\\\nW0130 10:54:13.420136 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 10:54:13.420257 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:13.423007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2062903219/tls.crt::/tmp/serving-cert-2062903219/tls.key\\\\\\\"\\\\nF0130 10:54:23.889891 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.166619 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.175347 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.186413 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.199601 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.206841 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-system-cni-dir\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.206888 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-conf-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.206911 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ef13186b-7f82-4025-97e3-d899be8c207f-proxy-tls\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.206937 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207007 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-hostroot\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207054 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c2dfbd43-97e4-4009-96e9-43abfe887630-hosts-file\") pod \"node-resolver-4dlfn\" (UID: \"c2dfbd43-97e4-4009-96e9-43abfe887630\") " pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207075 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-netd\") pod 
\"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207113 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovn-node-metrics-cert\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207199 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-cnibin\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207243 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-cni-bin\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207264 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-ovn-kubernetes\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207277 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-bin\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207296 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-cni-multus\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207320 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-kubelet\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207341 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-etc-kubernetes\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207358 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: 
\"kubernetes.io/host-path/ef13186b-7f82-4025-97e3-d899be8c207f-rootfs\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207376 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-systemd-units\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207395 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cni-binary-copy\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207411 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/02f48f89-74aa-48e8-930e-7a86f15de2de-cni-binary-copy\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207441 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ef13186b-7f82-4025-97e3-d899be8c207f-mcd-auth-proxy-config\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207474 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-systemd\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207493 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207531 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-socket-dir-parent\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207547 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-ovn\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207570 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-kubelet\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207587 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-slash\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207664 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207683 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skz45\" (UniqueName: \"kubernetes.io/projected/02f48f89-74aa-48e8-930e-7a86f15de2de-kube-api-access-skz45\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207724 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-netns\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207745 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wc2x\" (UniqueName: \"kubernetes.io/projected/3e4cac66-8338-46fe-8296-ce9dbd2257bd-kube-api-access-9wc2x\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207761 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bttx\" (UniqueName: \"kubernetes.io/projected/c2dfbd43-97e4-4009-96e9-43abfe887630-kube-api-access-9bttx\") pod \"node-resolver-4dlfn\" (UID: \"c2dfbd43-97e4-4009-96e9-43abfe887630\") " pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207801 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-cni-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207819 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-netns\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 
10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207834 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5xxm\" (UniqueName: \"kubernetes.io/projected/ef13186b-7f82-4025-97e3-d899be8c207f-kube-api-access-s5xxm\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207854 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-var-lib-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207869 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-node-log\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207884 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207928 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-env-overrides\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207943 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-script-lib\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207965 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf454\" (UniqueName: \"kubernetes.io/projected/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-kube-api-access-hf454\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.207993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-log-socket\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208009 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cnibin\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208026 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-daemon-config\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208044 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-os-release\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208067 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-system-cni-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208087 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-k8s-cni-cncf-io\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208107 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-multus-certs\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208134 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-config\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208209 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-os-release\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.208226 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-etc-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.209851 4869 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.220908 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.232574 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.255120 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.270789 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c27837499dde2cab3a368980b23ff5ef72b883b7f7b313fc6f2dc49d3d1627f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:23Z\\\",\\\"message\\\":\\\"W0130 10:54:13.244306 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 10:54:13.245599 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769770453 cert, and key in /tmp/serving-cert-2062903219/serving-signer.crt, /tmp/serving-cert-2062903219/serving-signer.key\\\\nI0130 10:54:13.415763 1 observer_polling.go:159] Starting file observer\\\\nW0130 10:54:13.420136 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 10:54:13.420257 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:13.423007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2062903219/tls.crt::/tmp/serving-cert-2062903219/tls.key\\\\\\\"\\\\nF0130 10:54:23.889891 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating 
requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 
10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.276186 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.279904 4869 scope.go:117] "RemoveContainer" containerID="888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5" Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.280134 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.280177 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ab4becb9a340d0fafb8e06973a842b01584d1d4cbb58d2f7c0b9e2e537536173"} Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.282045 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb"} Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.282103 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e"} Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.282116 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f0d1f12773500851d943f3d344598fdef2511a5acace8d3824e80fd1958d97e2"} Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.283217 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3"} Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.283268 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e025709dc4c9f1355c65483c6d4a92e2087d0a473b8c28f899351df0ed0dc3ac"} Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.289260 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.302096 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309521 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-socket-dir-parent\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309565 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-ovn\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309610 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309629 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skz45\" (UniqueName: \"kubernetes.io/projected/02f48f89-74aa-48e8-930e-7a86f15de2de-kube-api-access-skz45\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309650 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-kubelet\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309659 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-ovn\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-socket-dir-parent\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309727 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-kubelet\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309692 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-slash\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309667 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-slash\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309929 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-netns\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.309966 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wc2x\" (UniqueName: \"kubernetes.io/projected/3e4cac66-8338-46fe-8296-ce9dbd2257bd-kube-api-access-9wc2x\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310014 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-netns\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310022 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-cni-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310056 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-netns\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " 
pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310084 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5xxm\" (UniqueName: \"kubernetes.io/projected/ef13186b-7f82-4025-97e3-d899be8c207f-kube-api-access-s5xxm\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310105 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bttx\" (UniqueName: \"kubernetes.io/projected/c2dfbd43-97e4-4009-96e9-43abfe887630-kube-api-access-9bttx\") pod \"node-resolver-4dlfn\" (UID: \"c2dfbd43-97e4-4009-96e9-43abfe887630\") " pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310125 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-node-log\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310144 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310167 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-var-lib-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310187 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf454\" (UniqueName: \"kubernetes.io/projected/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-kube-api-access-hf454\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310210 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-log-socket\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310233 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-env-overrides\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310258 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-script-lib\") pod \"ovnkube-node-twvdq\" (UID: 
\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310270 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-cni-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310281 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-daemon-config\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310304 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cnibin\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-system-cni-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310338 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-var-lib-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310345 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-k8s-cni-cncf-io\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310368 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-multus-certs\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310377 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-netns\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310388 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-config\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310379 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310412 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-os-release\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310421 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-node-log\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310438 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-os-release\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310460 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310460 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-etc-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310491 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-etc-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310504 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-conf-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310527 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cnibin\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310540 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-system-cni-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310535 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ef13186b-7f82-4025-97e3-d899be8c207f-proxy-tls\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310573 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-k8s-cni-cncf-io\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310578 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-system-cni-dir\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310598 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-run-multus-certs\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310601 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c2dfbd43-97e4-4009-96e9-43abfe887630-hosts-file\") pod \"node-resolver-4dlfn\" (UID: \"c2dfbd43-97e4-4009-96e9-43abfe887630\") " pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310621 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-hostroot\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310677 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-cnibin\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310724 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-netd\") pod \"ovnkube-node-twvdq\" (UID: 
\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310745 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovn-node-metrics-cert\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310766 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-cni-bin\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310786 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-ovn-kubernetes\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-bin\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310828 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-cni-multus\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310847 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-kubelet\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310869 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-etc-kubernetes\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310888 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ef13186b-7f82-4025-97e3-d899be8c207f-rootfs\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310910 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-systemd-units\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 
10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310929 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/02f48f89-74aa-48e8-930e-7a86f15de2de-cni-binary-copy\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310949 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ef13186b-7f82-4025-97e3-d899be8c207f-mcd-auth-proxy-config\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310972 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-systemd\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.310993 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311012 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cni-binary-copy\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311250 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-os-release\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311288 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-os-release\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311334 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-system-cni-dir\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311438 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-cni-bin\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311729 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c2dfbd43-97e4-4009-96e9-43abfe887630-hosts-file\") pod \"node-resolver-4dlfn\" (UID: \"c2dfbd43-97e4-4009-96e9-43abfe887630\") " pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311964 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-config\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.311970 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cni-binary-copy\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312002 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-cnibin\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312014 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-ovn-kubernetes\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312044 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-systemd-units\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312044 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ef13186b-7f82-4025-97e3-d899be8c207f-rootfs\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312086 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-bin\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312099 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-log-socket\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312077 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-hostroot\") pod 
\"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312132 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-netd\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312172 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-etc-kubernetes\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312134 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-cni-multus\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312182 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-env-overrides\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312207 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-systemd\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312138 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-script-lib\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312168 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-host-var-lib-kubelet\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312220 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-conf-dir\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312244 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-openvswitch\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312784 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312855 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/02f48f89-74aa-48e8-930e-7a86f15de2de-multus-daemon-config\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312878 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ef13186b-7f82-4025-97e3-d899be8c207f-mcd-auth-proxy-config\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.312948 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/02f48f89-74aa-48e8-930e-7a86f15de2de-cni-binary-copy\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.317774 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovn-node-metrics-cert\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.318171 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ef13186b-7f82-4025-97e3-d899be8c207f-proxy-tls\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.330125 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.332050 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skz45\" (UniqueName: 
\"kubernetes.io/projected/02f48f89-74aa-48e8-930e-7a86f15de2de-kube-api-access-skz45\") pod \"multus-5jpbv\" (UID: \"02f48f89-74aa-48e8-930e-7a86f15de2de\") " pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.332074 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5xxm\" (UniqueName: \"kubernetes.io/projected/ef13186b-7f82-4025-97e3-d899be8c207f-kube-api-access-s5xxm\") pod \"machine-config-daemon-99lr2\" (UID: \"ef13186b-7f82-4025-97e3-d899be8c207f\") " pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.334982 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf454\" (UniqueName: \"kubernetes.io/projected/ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa-kube-api-access-hf454\") pod \"multus-additional-cni-plugins-6fqgt\" (UID: \"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\") " pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.335040 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bttx\" (UniqueName: \"kubernetes.io/projected/c2dfbd43-97e4-4009-96e9-43abfe887630-kube-api-access-9bttx\") pod \"node-resolver-4dlfn\" (UID: \"c2dfbd43-97e4-4009-96e9-43abfe887630\") " pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.336117 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wc2x\" (UniqueName: \"kubernetes.io/projected/3e4cac66-8338-46fe-8296-ce9dbd2257bd-kube-api-access-9wc2x\") pod \"ovnkube-node-twvdq\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.346662 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.360946 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.375522 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.388437 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.399796 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.406584 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.416653 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.418486 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-5jpbv" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.425928 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-4dlfn" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.431540 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.435062 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.441891 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.447251 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: W0130 10:54:31.458427 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef13186b_7f82_4025_97e3_d899be8c207f.slice/crio-098cbeb44ba70ce9369e4ae7c285822faad5e79638e59695a5f973bff3013145 WatchSource:0}: Error finding container 098cbeb44ba70ce9369e4ae7c285822faad5e79638e59695a5f973bff3013145: Status 404 returned error can't find the container with id 098cbeb44ba70ce9369e4ae7c285822faad5e79638e59695a5f973bff3013145 Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.463143 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.490178 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.506081 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.526948 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.539329 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.555435 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.580328 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.597284 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.612251 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.625979 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.641001 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:31Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.717162 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.717319 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.717351 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.717476 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.717549 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:33.71752747 +0000 UTC m=+24.267403536 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.717628 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:54:33.717617202 +0000 UTC m=+24.267493268 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.717748 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.717782 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:33.717773227 +0000 UTC m=+24.267649293 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.818176 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:31 crc kubenswrapper[4869]: I0130 10:54:31.818225 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818395 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818432 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818444 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818488 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:33.818472467 +0000 UTC m=+24.368348533 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818397 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818517 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818525 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:31 crc kubenswrapper[4869]: E0130 10:54:31.818551 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:33.818543859 +0000 UTC m=+24.368419925 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.115217 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 14:31:07.655160206 +0000 UTC Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.132036 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:32 crc kubenswrapper[4869]: E0130 10:54:32.132215 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.132283 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:32 crc kubenswrapper[4869]: E0130 10:54:32.132462 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.137022 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.137783 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.138468 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.139184 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.139841 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.140378 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.197373 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.198396 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.199143 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.199740 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.200342 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.201068 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.201621 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.202187 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.202748 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.203283 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.204094 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.204531 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.205421 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.206141 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.206654 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.209266 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.209742 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.210438 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.210919 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.211586 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.212359 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.213008 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.213788 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.214393 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.214933 4869 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.215054 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.216524 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.217169 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.217626 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.222258 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.223038 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.224109 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.224917 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.226010 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.226516 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.228403 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.229121 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.230204 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.230676 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.231695 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.232298 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.233595 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.234168 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.235280 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.235977 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.236622 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.237732 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.238258 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.289131 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerID="7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366" exitCode=0 Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.289219 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" 
event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.289290 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"a3edb8ad57797f63733b91bcf2451a1fb6b443a155222148386450ad0e0cf3a4"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.292794 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerStarted","Data":"0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.292840 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerStarted","Data":"ca2cf1777f56f3a4c8ba6ef24b0ef8bf28616664c7b8349a1b48ec9879f8b230"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.299158 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.299239 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.299260 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"098cbeb44ba70ce9369e4ae7c285822faad5e79638e59695a5f973bff3013145"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.301494 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerStarted","Data":"8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.301558 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerStarted","Data":"4588a421b8ea62bcfc3962d2fc1c688097eefe6971deec5de5af54a2cbeae8d3"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.304215 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-4dlfn" event={"ID":"c2dfbd43-97e4-4009-96e9-43abfe887630","Type":"ContainerStarted","Data":"28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.304289 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-4dlfn" event={"ID":"c2dfbd43-97e4-4009-96e9-43abfe887630","Type":"ContainerStarted","Data":"effc6e0425e2fc070a6ff04ed08935c88ce5373dd747bc02b407f832ed6feaf8"} Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.316995 4869 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786
253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.338379 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.362787 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.385229 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.399580 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.419603 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.438206 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.454762 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.473261 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.488511 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.511437 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.528521 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.550209 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.569509 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.588182 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.606901 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.623071 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.639124 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.667944 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.680851 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.694971 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.718500 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.732793 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.752239 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc kubenswrapper[4869]: I0130 10:54:32.770602 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:32 crc 
kubenswrapper[4869]: I0130 10:54:32.790548 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:32Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.115852 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 19:03:13.648272212 +0000 UTC Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.132327 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.132499 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.311160 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.315313 4869 generic.go:334] "Generic (PLEG): container finished" podID="ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa" containerID="8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031" exitCode=0 Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.315375 4869 generic.go:334] "Generic (PLEG): container finished" podID="ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa" containerID="5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25" exitCode=0 Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.315378 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerDied","Data":"8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.315465 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerDied","Data":"5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.322797 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.322847 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.322860 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.322872 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.322884 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c"} Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.322896 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} Jan 30 10:54:33 crc 
kubenswrapper[4869]: I0130 10:54:33.330631 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.355945 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.371927 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.373349 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-2ppc8"] Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.374023 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.377507 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.377676 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.377788 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.379334 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.401012 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\"
:\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.419651 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.434063 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.457168 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z 
is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.476632 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.491867 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.509075 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.525733 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc 
kubenswrapper[4869]: I0130 10:54:33.539771 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-host\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.539839 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-serviceca\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.539873 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fv9p\" (UniqueName: \"kubernetes.io/projected/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-kube-api-access-7fv9p\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.540545 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.554257 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.566804 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.582131 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.593515 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.608223 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.623441 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.637800 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.641286 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-host\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.641373 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-serviceca\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.641405 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fv9p\" (UniqueName: \"kubernetes.io/projected/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-kube-api-access-7fv9p\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.641481 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-host\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.642432 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-serviceca\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.652409 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.667776 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fv9p\" (UniqueName: \"kubernetes.io/projected/d7dd6692-0691-4b9f-8ba4-d76c0e423f0c-kube-api-access-7fv9p\") pod \"node-ca-2ppc8\" (UID: \"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\") " pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.675144 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.691269 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.702983 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.717527 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.735910 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z 
is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.742183 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.742318 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.742349 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.744363 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.744474 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:37.744452125 +0000 UTC m=+28.294328191 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.744352 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.744535 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:37.744527737 +0000 UTC m=+28.294403803 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.747623 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-30 10:54:37.747583644 +0000 UTC m=+28.297459710 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.760189 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.772493 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.773578 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.787022 4869 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.787560 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.788879 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.799933 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.813599 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.827441 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.838263 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.843781 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.843852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844008 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844030 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844043 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844008 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844112 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844123 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844095 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:37.844077054 +0000 UTC m=+28.393953120 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:33 crc kubenswrapper[4869]: E0130 10:54:33.844163 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2026-01-30 10:54:37.844153546 +0000 UTC m=+28.394029612 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.851927 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.867917 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.878174 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-2ppc8" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.883915 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.908238 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.926509 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.949236 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.970768 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z 
is after 2025-08-24T17:21:41Z" Jan 30 10:54:33 crc kubenswrapper[4869]: I0130 10:54:33.985386 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:33Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.020364 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.062797 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.101224 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.116269 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 15:10:11.083441144 +0000 UTC Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.132551 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.132578 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:34 crc kubenswrapper[4869]: E0130 10:54:34.132798 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:34 crc kubenswrapper[4869]: E0130 10:54:34.132991 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.141492 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.186402 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z 
is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.225595 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.263926 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.301651 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.328602 4869 generic.go:334] "Generic (PLEG): container finished" podID="ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa" containerID="875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71" exitCode=0 Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.328661 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerDied","Data":"875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71"} Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.331343 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2ppc8" event={"ID":"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c","Type":"ContainerStarted","Data":"27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6"} Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.331411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2ppc8" event={"ID":"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c","Type":"ContainerStarted","Data":"1006d9f9d58c81fbcefe55732c4a716a9e13a7d0d0cfaa37b1c0a2316a2f304b"} Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.341116 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.381438 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.419064 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.463795 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.499790 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.539973 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.580854 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.619690 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.667061 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.706983 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.746074 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.779439 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.822390 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.865200 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z 
is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.907338 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.938447 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:34 crc kubenswrapper[4869]: I0130 10:54:34.980415 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:34Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.019614 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.061579 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.101558 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.117015 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 06:11:13.347729835 +0000 UTC Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.132028 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:35 crc kubenswrapper[4869]: E0130 10:54:35.132445 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.140837 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.191177 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.226098 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.338138 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c"} Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.341414 4869 generic.go:334] "Generic (PLEG): container finished" podID="ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa" containerID="befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55" exitCode=0 Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.341472 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerDied","Data":"befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55"} Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.354782 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.373812 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.392286 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.410408 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.429544 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.460352 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.500505 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.540844 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.583344 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.626539 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z 
is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.667609 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.702109 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.750505 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.760205 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.767459 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.767546 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.767563 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.767746 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.783629 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.833537 4869 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.833911 4869 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.835429 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.835455 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.835465 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.835483 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.835495 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:35Z","lastTransitionTime":"2026-01-30T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:35 crc kubenswrapper[4869]: E0130 10:54:35.848806 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.853066 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.853101 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.853111 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.853131 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.853143 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:35Z","lastTransitionTime":"2026-01-30T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.865803 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: E0130 10:54:35.868376 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.872612 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.872658 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.872669 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.872687 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.872697 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:35Z","lastTransitionTime":"2026-01-30T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:35 crc kubenswrapper[4869]: E0130 10:54:35.889080 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.895141 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.895194 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.895204 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.895222 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.895234 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:35Z","lastTransitionTime":"2026-01-30T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:35 crc kubenswrapper[4869]: E0130 10:54:35.908391 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.912913 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.912968 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
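[editor's note] Every node-status retry above fails the same way: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, while the node clock reads 2026-01-30. The wording "x509: certificate has expired or is not yet valid: current time ... is after ..." is Go's crypto/x509 validity-window error, so the check that keeps rejecting the handshake can be reproduced directly. A minimal sketch, assuming the certificate file name (the webhook container does mount /etc/webhook-cert/ per the pod status later in this log, but "tls.crt" is an assumption):

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Assumed file name under the mount seen in this log (/etc/webhook-cert/).
        raw, err := os.ReadFile("/etc/webhook-cert/tls.crt")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(raw) // the leaf certificate is the first PEM block
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        // The same NotBefore/NotAfter window test that certificate verification applies.
        now := time.Now()
        switch {
        case now.Before(cert.NotBefore):
            fmt.Printf("not yet valid: current time %s is before %s\n", now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        case now.After(cert.NotAfter):
            fmt.Printf("expired: current time %s is after %s\n", now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        default:
            fmt.Printf("valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
        }
    }

Run against the webhook's certificate, this prints the same "current time ... is after ..." clause seen in every failed PATCH above.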
event="NodeHasNoDiskPressure" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.912981 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.913006 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.913021 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:35Z","lastTransitionTime":"2026-01-30T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:35 crc kubenswrapper[4869]: E0130 10:54:35.926756 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:35Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:35 crc kubenswrapper[4869]: E0130 10:54:35.926906 4869 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.928961 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
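[editor's note] The "update node status exceeds retry count" line closes out one sync pass: the kubelet attempts the status PATCH a small fixed number of times back-to-back, logging "will retry" after each failure, then gives up until the next status-update interval, which is why the same payload reappears moments later. A simplified sketch of that control flow (the retry constant matches the kubelet's nodeStatusUpdateRetry; patchNodeStatus is an illustrative stand-in for the API call the webhook is rejecting):

    package main

    import (
        "errors"
        "fmt"
    )

    // The kubelet attempts the status PATCH this many times per sync pass.
    const nodeStatusUpdateRetry = 5

    // patchNodeStatus stands in for the apiserver call that the expired
    // webhook certificate causes to fail in the log above.
    func patchNodeStatus() error {
        return errors.New(`failed calling webhook "node.network-node-identity.openshift.io"`)
    }

    func updateNodeStatus() error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            err := patchNodeStatus()
            if err == nil {
                return nil
            }
            fmt.Printf("Error updating node status, will retry: %v\n", err)
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        if err := updateNodeStatus(); err != nil {
            fmt.Println("Unable to update node status:", err)
        }
    }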
event="NodeHasSufficientMemory" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.928988 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.929016 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.929034 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:35 crc kubenswrapper[4869]: I0130 10:54:35.929044 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:35Z","lastTransitionTime":"2026-01-30T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.033487 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.033555 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.033564 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.033582 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.033614 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.117264 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 13:54:36.296127267 +0000 UTC Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.132892 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.132940 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:36 crc kubenswrapper[4869]: E0130 10:54:36.133115 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:36 crc kubenswrapper[4869]: E0130 10:54:36.133312 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.139700 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.139764 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.139776 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.139890 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.139908 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.243031 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.243084 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.243098 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.243120 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.243136 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.249555 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.251426 4869 scope.go:117] "RemoveContainer" containerID="888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5" Jan 30 10:54:36 crc kubenswrapper[4869]: E0130 10:54:36.251607 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.346944 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.346983 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.346993 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.347010 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.347021 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.349585 4869 generic.go:334] "Generic (PLEG): container finished" podID="ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa" containerID="e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59" exitCode=0 Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.349626 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerDied","Data":"e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.364293 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.391164 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.425445 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"da
ta-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0d
d788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.443826 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.449646 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.449728 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.449741 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.449765 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.449778 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.458817 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.470949 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.486363 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.509355 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.527594 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.543919 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.554162 4869 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.554370 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.554440 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.554522 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.554627 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.560491 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.577972 4869 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.591895 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.607812 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.618255 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:36Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.657542 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.658109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.658203 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:36 crc 
kubenswrapper[4869]: I0130 10:54:36.658337 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.658423 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.766996 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.767042 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.767054 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.767072 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.767084 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.870653 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.870726 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.870742 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.870764 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.870781 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.974171 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.974209 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.974219 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.974236 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:36 crc kubenswrapper[4869]: I0130 10:54:36.974247 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:36Z","lastTransitionTime":"2026-01-30T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.076696 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.076750 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.076763 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.076784 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.076797 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.117763 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 01:48:06.758395234 +0000 UTC
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.132508 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.132726 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.179495 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.179534 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.179544 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.179562 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.179573 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.283107 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.283153 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.283165 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.283188 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.283202 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.357869 4869 generic.go:334] "Generic (PLEG): container finished" podID="ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa" containerID="4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718" exitCode=0 Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.357929 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerDied","Data":"4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718"} Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.379433 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.385849 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.386012 4869 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.386131 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.386229 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.386321 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.395374 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.414732 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics 
northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"
host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.442324 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\"
:0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.460488 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.474473 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.487888 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.490923 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.490979 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.490995 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.491019 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.491036 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.503142 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.516353 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.531252 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.545931 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.562781 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.578837 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.593131 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.593951 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.593981 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.593992 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.594009 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.594020 4869 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.608616 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:37Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.697490 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.697539 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.697552 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.697574 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.697588 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.793239 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.793409 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:54:45.793370663 +0000 UTC m=+36.343246879 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.793532 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.793566 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.793724 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.793775 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:45.793763395 +0000 UTC m=+36.343639631 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.793873 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.793902 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:45.793895388 +0000 UTC m=+36.343771454 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.801242 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.801302 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.801314 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.801338 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.801351 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.894119 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.894230 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894381 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894411 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894429 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894492 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:45.894471255 +0000 UTC m=+36.444347321 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894381 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894524 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894534 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 10:54:37 crc kubenswrapper[4869]: E0130 10:54:37.894567 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:54:45.894558518 +0000 UTC m=+36.444434594 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.904793 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.904879 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.904896 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.904923 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:37 crc kubenswrapper[4869]: I0130 10:54:37.904939 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:37Z","lastTransitionTime":"2026-01-30T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.007893 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.007965 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.007982 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.008010 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.008025 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.111063 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.111120 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.111133 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.111156 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.111171 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.118251 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 09:21:06.369655233 +0000 UTC
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.132222 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.132268 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:54:38 crc kubenswrapper[4869]: E0130 10:54:38.132759 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:54:38 crc kubenswrapper[4869]: E0130 10:54:38.132871 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.214297 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.214382 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.214399 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.214425 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.214441 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.317143 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.317205 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.317217 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.317239 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.317252 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.365654 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.365957 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.371078 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" event={"ID":"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa","Type":"ContainerStarted","Data":"f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.381688 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.394659 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.395789 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.414186 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.420849 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.420909 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.420925 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.420945 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.420959 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.429591 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.444438 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.458359 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.467867 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.478719 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.490245 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.508312 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.523265 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.523314 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.523327 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.523351 4869 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.523366 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.538372 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.557674 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.571316 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.580702 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.600793 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.616668 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.626430 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.626482 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.626496 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.626520 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.626539 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.632080 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.646250 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.662835 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.678278 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.692827 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.704551 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.716503 4869 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.728978 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.730010 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.730072 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.730084 4869 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.730104 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.730114 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.756112 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/
lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.775402 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.786188 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.795744 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.810973 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":
\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11
\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.828503 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\
\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mount
Path\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-30T10:54:38Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.832541 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.832585 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.832598 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.832616 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.832630 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.935743 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.935799 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.935815 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.935837 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:38 crc kubenswrapper[4869]: I0130 10:54:38.935850 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:38Z","lastTransitionTime":"2026-01-30T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.040196 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.040258 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.040275 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.040304 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.040325 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.119016 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 06:25:47.297981506 +0000 UTC Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.132723 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:39 crc kubenswrapper[4869]: E0130 10:54:39.132917 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.143698 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.143757 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.143768 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.143788 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.143801 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.247310 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.247427 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.247443 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.247468 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.247481 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.350183 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.350222 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.350235 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.350254 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.350266 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.375496 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.375543 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.404294 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.418801 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed
08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.433797 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.453009 4869 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.453055 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.453067 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.453085 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.453100 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.464496 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.492854 4869 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.516639 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.538824 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.550412 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.555671 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.555724 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.555737 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc 
kubenswrapper[4869]: I0130 10:54:39.555754 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.555767 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.565064 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.581014 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.600775 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc
0ad0f20e0c3b17e1517e6d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.624167 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.641031 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.655551 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.658552 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.658578 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.658587 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.658607 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.658621 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.666107 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.682679 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:39Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.761079 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.761128 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc 
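
The repeated "Node became not ready" / "Recording event message for node" entries in this stretch come from the kubelet's network-readiness gate: on every sync the container runtime (CRI-O here, through its ocicni library) reports NetworkReady=false because the configured conf directory, /etc/kubernetes/cni/net.d/ in this log, contains no CNI network-configuration file. The sketch below is a minimal, assumption-labeled illustration of that directory test, not CRI-O's actual code; only the directory path is taken from the log.

// cnicheck.go — simplified sketch (assumption: loosely modeled on the
// check behind "no CNI configuration file in /etc/kubernetes/cni/net.d/").
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether confDir contains at least one CNI
// network-configuration file (.conf, .conflist, or .json).
func hasCNIConfig(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		// Until a config file appears, the runtime keeps reporting
		// NetworkReady=false and the kubelet keeps the node NotReady.
		fmt.Println("NetworkReady=false: no CNI configuration found", err)
		return
	}
	fmt.Println("NetworkReady=true")
}

In this excerpt the directory presumably stays empty because the network plugin itself is not up, consistent with the ovnkube-node pod shown later whose ovnkube-controller container is unready.
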
kubenswrapper[4869]: I0130 10:54:39.761141 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.761161 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.761174 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.863735 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.863769 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.863778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.863791 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.863800 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.967056 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.967111 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.967126 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.967146 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:39 crc kubenswrapper[4869]: I0130 10:54:39.967162 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:39Z","lastTransitionTime":"2026-01-30T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.070653 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.070761 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.070777 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.070798 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.070812 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.120081 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 09:28:09.448121764 +0000 UTC Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.131959 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:40 crc kubenswrapper[4869]: E0130 10:54:40.132096 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.132397 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:40 crc kubenswrapper[4869]: E0130 10:54:40.132615 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
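
Every "Failed to update status for pod" entry in this excerpt is rejected with the same x509 error: the network-node-identity webhook at https://127.0.0.1:9743 is serving a certificate whose NotAfter (2025-08-24T17:21:41Z) is months before the node's current clock (2026-01-30), so the TLS handshake fails before the patch is ever applied. Go's crypto/x509 rejects a chain on exactly the NotBefore/NotAfter window test sketched below; the file name is illustrative and the snippet mirrors, rather than reproduces, the library's verification path.

// certwindow.go — hedged sketch of the validity-window test behind
// "x509: certificate has expired or is not yet valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Path is illustrative; any PEM-encoded certificate works.
	data, err := os.ReadFile("server.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	// Same window check x509 verification applies; in the log,
	// current time 2026-01-30 is after the 2025-08-24 NotAfter.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("invalid: current time %s is outside [%s, %s]\n",
			now.Format(time.RFC3339),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
		return
	}
	fmt.Println("certificate is within its validity window")
}

Note the contrast with the certificate_manager line just above: the kubelet's own kubelet-serving certificate is still valid until 2026-02-24, though already past its 2025-12-11 rotation deadline; only the webhook's serving certificate has expired outright.
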
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.164497 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"s
tartedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326
018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.174030 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.174109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.174132 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.174163 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.174185 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.190157 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.214095 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.241869 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.264038 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":
\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11
\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.276764 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.276796 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.276807 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.276822 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.276834 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.284857 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc
0ad0f20e0c3b17e1517e6d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.298362 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.317766 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617
f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.333451 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.347664 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.362564 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.375146 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.378995 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.379228 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.379332 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc 
kubenswrapper[4869]: I0130 10:54:40.379429 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.379512 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.392672 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.411123 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.428793 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.483057 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.483109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.483121 4869 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.483143 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.483156 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.585929 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.586003 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.586025 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.586054 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.586071 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.693255 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.693833 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.693847 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.693871 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.693887 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.797642 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.797695 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.797732 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.797756 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.797784 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.900800 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.900898 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.900911 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.900931 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:40 crc kubenswrapper[4869]: I0130 10:54:40.900942 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:40Z","lastTransitionTime":"2026-01-30T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.004527 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.004625 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.004639 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.004663 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.004761 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.107276 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.107322 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.107332 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.107353 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.107365 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.120569 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 05:53:45.901625423 +0000 UTC Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.131997 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:41 crc kubenswrapper[4869]: E0130 10:54:41.132188 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.142295 4869 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.210698 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.210767 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.210781 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.210803 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.210819 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.314679 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.314772 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.314787 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.314828 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.314841 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.384631 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/0.log" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.387531 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerID="e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f" exitCode=1 Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.387577 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f"} Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.388627 4869 scope.go:117] "RemoveContainer" containerID="e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.411761 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.418058 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.418101 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.418110 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.418126 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.418137 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.428216 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.441240 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.458635 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.478696 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:40Z\\\",\\\"message\\\":\\\"8 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 10:54:40.395332 6178 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 10:54:40.396079 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0130 10:54:40.396128 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 10:54:40.396135 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 10:54:40.396168 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 10:54:40.396181 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 10:54:40.396190 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 10:54:40.396296 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:40.396323 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 10:54:40.396347 6178 factory.go:656] Stopping watch factory\\\\nI0130 10:54:40.396364 6178 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:40.396389 6178 handler.go:208] Removed *v1.Node event handler 
2\\\\nI01\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.504951 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.519103 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.522402 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.522442 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.522451 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.522470 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.522481 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.533734 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.548054 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.562335 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.581661 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.595618 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.613521 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.624926 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.625023 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.625044 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.625109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.625132 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.630404 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.644658 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:41Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.731390 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.731453 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.731463 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.731485 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.731499 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.834535 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.834612 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.834632 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.834664 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.834685 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.937162 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.937266 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.937333 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.937357 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:41 crc kubenswrapper[4869]: I0130 10:54:41.937369 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:41Z","lastTransitionTime":"2026-01-30T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.040616 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.040673 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.040681 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.040697 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.040723 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.121410 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 04:32:49.143599022 +0000 UTC
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.133102 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.133196 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:54:42 crc kubenswrapper[4869]: E0130 10:54:42.133307 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:54:42 crc kubenswrapper[4869]: E0130 10:54:42.133484 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.144090 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.144149 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.144160 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.144183 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.144197 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.250288 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.250379 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.250403 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.250434 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.250464 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.353175 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.353246 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.353262 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.353287 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.353303 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.393604 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/0.log"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.396104 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.396646 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.411930 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.424311 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.437044 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.453396 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.471163 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.471209 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.471220 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.471241 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.471253 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.480741 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.495777 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.510051 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.524643 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.536436 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.549995 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.565262 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.574141 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.574193 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:42 crc 
kubenswrapper[4869]: I0130 10:54:42.574204 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.574227 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.574240 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.595771 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c878
0a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:40Z\\\",\\\"message\\\":\\\"8 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 10:54:40.395332 6178 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 10:54:40.396079 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0130 10:54:40.396128 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 10:54:40.396135 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 10:54:40.396168 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 10:54:40.396181 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 10:54:40.396190 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 10:54:40.396296 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:40.396323 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 10:54:40.396347 6178 factory.go:656] Stopping watch factory\\\\nI0130 10:54:40.396364 6178 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:40.396389 6178 handler.go:208] Removed *v1.Node event handler 
2\\\\nI01\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.618310 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.633953 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.647208 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:42Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.676554 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.676599 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.676610 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.676629 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.676639 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.779283 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.779340 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.779353 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.779374 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.779388 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.881978 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.882008 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.882017 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.882033 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.882042 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.984691 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.984781 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.984800 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.984822 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:42 crc kubenswrapper[4869]: I0130 10:54:42.984848 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:42Z","lastTransitionTime":"2026-01-30T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.087629 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.087698 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.087725 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.087748 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.087764 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.122202 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 09:52:07.701207583 +0000 UTC
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.132516 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:54:43 crc kubenswrapper[4869]: E0130 10:54:43.133674 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.191091 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.191159 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.191175 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.191203 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.191220 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.294340 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.294376 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.294387 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.294406 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.294415 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.396869 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.396937 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.396967 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.396990 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.397009 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.407836 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/1.log"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.408521 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/0.log"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.412241 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerID="00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac" exitCode=1
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.412304 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac"}
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.412359 4869 scope.go:117] "RemoveContainer" containerID="e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.413197 4869 scope.go:117] "RemoveContainer" containerID="00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac"
Jan 30 10:54:43 crc kubenswrapper[4869]: E0130 10:54:43.413460 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.440094 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c878
0a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:40Z\\\",\\\"message\\\":\\\"8 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 10:54:40.395332 6178 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 10:54:40.396079 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0130 10:54:40.396128 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 10:54:40.396135 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 10:54:40.396168 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 10:54:40.396181 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 10:54:40.396190 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 10:54:40.396296 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:40.396323 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 10:54:40.396347 6178 factory.go:656] Stopping watch factory\\\\nI0130 10:54:40.396364 6178 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:40.396389 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI01\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped 
ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.462897 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.479149 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.493963 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.500673 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.500867 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.500964 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.501062 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.501149 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.508684 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.525275 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.540012 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.553301 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.575231 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.590670 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.602539 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.604356 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.604414 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.604426 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.604444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.604457 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.617374 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.626196 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.638279 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.653057 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.707289 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.707329 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.707339 4869 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.707353 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.707367 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.810945 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.811001 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.811017 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.811044 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.811063 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.889977 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999"] Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.890740 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.896413 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.896630 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.913856 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.913893 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.913903 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.913924 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.913935 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:43Z","lastTransitionTime":"2026-01-30T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.914485 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.928161 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.949750 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"da
ta-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0d
d788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.969686 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:43 crc kubenswrapper[4869]: I0130 10:54:43.986335 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.003334 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:43Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.016341 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.016384 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.016394 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.016411 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.016423 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.021908 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.044539 4869 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e18f29d7b517604067a2dfe68f1f286ce41836fc0ad0f20e0c3b17e1517e6d3f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:40Z\\\",\\\"message\\\":\\\"8 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 10:54:40.395332 6178 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 10:54:40.396079 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0130 10:54:40.396128 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 10:54:40.396135 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 10:54:40.396168 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 10:54:40.396181 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 10:54:40.396190 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 10:54:40.396296 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:40.396323 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 10:54:40.396347 6178 factory.go:656] Stopping watch factory\\\\nI0130 10:54:40.396364 6178 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:40.396389 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI01\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.064511 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/23455e82-301e-4eaa-9358-5f00c6840ca7-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.064556 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/23455e82-301e-4eaa-9358-5f00c6840ca7-env-overrides\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: 
\"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.064587 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/23455e82-301e-4eaa-9358-5f00c6840ca7-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.064608 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzkfw\" (UniqueName: \"kubernetes.io/projected/23455e82-301e-4eaa-9358-5f00c6840ca7-kube-api-access-zzkfw\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.066634 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.084183 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.098364 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.114786 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.119183 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.119237 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.119253 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.119275 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.119287 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.122366 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 20:43:04.979362295 +0000 UTC Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.132200 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.132540 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:44 crc kubenswrapper[4869]: E0130 10:54:44.132699 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:44 crc kubenswrapper[4869]: E0130 10:54:44.132822 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.133229 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.150577 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.165485 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/23455e82-301e-4eaa-9358-5f00c6840ca7-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.165529 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/23455e82-301e-4eaa-9358-5f00c6840ca7-env-overrides\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.165559 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/23455e82-301e-4eaa-9358-5f00c6840ca7-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.165580 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-zzkfw\" (UniqueName: \"kubernetes.io/projected/23455e82-301e-4eaa-9358-5f00c6840ca7-kube-api-access-zzkfw\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.166275 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/23455e82-301e-4eaa-9358-5f00c6840ca7-env-overrides\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.166349 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/23455e82-301e-4eaa-9358-5f00c6840ca7-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.169693 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\
\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.174012 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/23455e82-301e-4eaa-9358-5f00c6840ca7-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.184809 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzkfw\" (UniqueName: \"kubernetes.io/projected/23455e82-301e-4eaa-9358-5f00c6840ca7-kube-api-access-zzkfw\") pod \"ovnkube-control-plane-749d76644c-ww999\" (UID: \"23455e82-301e-4eaa-9358-5f00c6840ca7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.186841 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.207302 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.222894 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.222937 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.222948 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.222965 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.222976 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.325263 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.325343 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.325354 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.325373 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.325383 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.419759 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/1.log" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.423348 4869 scope.go:117] "RemoveContainer" containerID="00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac" Jan 30 10:54:44 crc kubenswrapper[4869]: E0130 10:54:44.423558 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.425998 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" event={"ID":"23455e82-301e-4eaa-9358-5f00c6840ca7","Type":"ContainerStarted","Data":"5e2e1e8e7fa5e5d474f07157bd705f723826f54638e394576f3013b24492ccde"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.428385 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.428428 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.428440 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.428457 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.428550 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.440282 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.460577 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.474043 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.502729 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.518499 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.530957 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.530996 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.531009 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.531025 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.531038 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.535147 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.548905 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.564361 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.582730 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.598848 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.612628 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.626726 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.636587 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.636644 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.636657 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.636694 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.636760 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.646225 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.663827 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-2krt6"] Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.664384 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:44 crc kubenswrapper[4869]: E0130 10:54:44.664449 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.667312 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.685048 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.699570 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.718267 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.733260 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.739243 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.739279 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.739290 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.739305 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.739332 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.746803 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.759355 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.771068 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.774517 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.774620 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stx4p\" (UniqueName: \"kubernetes.io/projected/35533ad8-7435-413d-bad1-05a0ca183c0d-kube-api-access-stx4p\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.781931 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.795462 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.806878 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.842527 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.843000 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.843125 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc 
kubenswrapper[4869]: I0130 10:54:44.843206 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.843277 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.847754 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.876047 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.876139 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stx4p\" (UniqueName: \"kubernetes.io/projected/35533ad8-7435-413d-bad1-05a0ca183c0d-kube-api-access-stx4p\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:44 crc kubenswrapper[4869]: E0130 10:54:44.876667 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:44 crc kubenswrapper[4869]: E0130 10:54:44.876909 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:54:45.376881917 +0000 UTC m=+35.926757983 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.882120 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.898828 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stx4p\" (UniqueName: \"kubernetes.io/projected/35533ad8-7435-413d-bad1-05a0ca183c0d-kube-api-access-stx4p\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.903040 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.922773 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.939034 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.945644 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.945688 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.945703 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.945743 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.945760 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:44Z","lastTransitionTime":"2026-01-30T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.961789 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.976255 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:44 crc kubenswrapper[4869]: I0130 10:54:44.990285 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:44Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.019304 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.048994 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.049316 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.049378 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.049734 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.049808 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.122983 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 23:12:05.914931842 +0000 UTC Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.132436 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.132645 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.152969 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.153015 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.153025 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.153044 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.153057 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.256026 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.256114 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.256125 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.256138 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.256148 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.359107 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.359161 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.359174 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.359192 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.359206 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.381823 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.382018 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.382121 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:54:46.382095007 +0000 UTC m=+36.931971263 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.433797 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" event={"ID":"23455e82-301e-4eaa-9358-5f00c6840ca7","Type":"ContainerStarted","Data":"efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.433889 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" event={"ID":"23455e82-301e-4eaa-9358-5f00c6840ca7","Type":"ContainerStarted","Data":"d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.450132 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.462135 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.462186 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.462198 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.462219 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.462234 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.467512 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.486788 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.525520 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.542644 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.564947 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.564981 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.564990 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.565006 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.565015 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.575245 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.596250 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.611233 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.624313 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.639021 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.654272 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.668918 4869 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.668963 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.668974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.669002 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.669015 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.669129 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.685123 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.700444 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.714067 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.728628 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.740955 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:45Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.772211 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.772266 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.772276 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc 
kubenswrapper[4869]: I0130 10:54:45.772292 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.772302 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.875124 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.875168 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.875178 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.875193 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.875202 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.886936 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.887054 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.887080 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.887118 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:55:01.887089901 +0000 UTC m=+52.436966107 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.887165 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.887220 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:01.887206304 +0000 UTC m=+52.437082370 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.887313 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.887383 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:01.887372409 +0000 UTC m=+52.437248695 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.977852 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.977895 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.977903 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.977916 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.977926 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:45Z","lastTransitionTime":"2026-01-30T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.988402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:45 crc kubenswrapper[4869]: I0130 10:54:45.988457 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988586 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988603 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988618 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988628 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988663 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:01.988649466 +0000 UTC m=+52.538525522 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988661 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988685 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:45 crc kubenswrapper[4869]: E0130 10:54:45.988758 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:01.988740488 +0000 UTC m=+52.538616554 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.076894 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.076935 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.076946 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.076963 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.076974 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.090868 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:46Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.094396 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.094438 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.094449 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.094464 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.094474 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.106472 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:46Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.110449 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.110475 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
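The repeated patch failures above share one root cause: the kubelet's node-status PATCH is intercepted by the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-30. Below is a minimal standalone Go sketch of a diagnostic that dials that endpoint and reports the validity window of whatever certificate is presented; the address is taken from the log line, and the program itself is hypothetical, not part of kubelet.

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Webhook endpoint from the log line above.
        addr := "127.0.0.1:9743"

        // InsecureSkipVerify is deliberate: normal verification would abort
        // the handshake on the expired certificate before we could read it.
        conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial %s: %v", addr, err)
        }
        defer conn.Close()

        // Inspect the leaf certificate the server actually presented.
        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("notBefore=%s notAfter=%s\n",
            cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
        if time.Now().After(cert.NotAfter) {
            fmt.Println("certificate has expired; x509 verification fails exactly as logged")
        }
    }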
event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.110484 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.110499 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.110508 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.123396 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 11:48:17.015994529 +0000 UTC Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.123626 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:46Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.128134 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.128169 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
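By contrast, the certificate_manager entry above shows the kubelet-serving certificate itself is still valid until 2026-02-24, but its rotation deadline (2025-11-18) has already passed, so a rotation attempt is due. That deadline is chosen as a randomized point late in the certificate's validity window; the sketch below illustrates the jitter pattern, where the 70-90% window and the assumed one-year validity are illustrative assumptions, not kubelet's exact constants.

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline picks a random instant late in the certificate's
    // lifetime (fraction drawn from [0.7, 0.9) of the total; illustrative).
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        frac := 0.7 + 0.2*rand.Float64()
        return notBefore.Add(time.Duration(float64(total) * frac))
    }

    func main() {
        // Expiration taken from the log line; the issue date is an assumption.
        notAfter, err := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
        if err != nil {
            panic(err)
        }
        notBefore := notAfter.AddDate(-1, 0, 0) // assumed one-year validity
        fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
    }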
event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.128180 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.128198 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.128211 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.132360 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.132404 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.132356 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.132496 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.132630 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.132689 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.143549 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:46Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.147364 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.147410 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
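Every NetworkPluginNotReady condition and every "No sandbox for pod can be found" retry above reduces to the same state: nothing in /etc/kubernetes/cni/net.d/. Below is a small Go sketch of the kind of check a network-readiness probe performs, listing recognised CNI config files in that directory; it is a diagnostic sketch, not CRI-O or kubelet source.

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // Directory named in the NetworkPluginNotReady message.
        dir := "/etc/kubernetes/cni/net.d"

        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        found := 0
        for _, e := range entries {
            // CNI loaders conventionally accept .conf, .conflist and .json files.
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
                found++
            }
        }
        if found == 0 {
            fmt.Println("no CNI configuration files; the node stays NotReady until the network provider writes one")
        }
    }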
event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.147422 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.147439 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.147452 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.159859 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:46Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.159996 4869 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.161872 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.161908 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.161918 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.161934 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.161943 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.263778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.263835 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.263846 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.263864 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.263877 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.366520 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.366596 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.366607 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.366628 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.366641 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.393271 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.393444 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:46 crc kubenswrapper[4869]: E0130 10:54:46.393496 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:54:48.393480175 +0000 UTC m=+38.943356241 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.469144 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.469209 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.469221 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.469241 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.469260 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.572369 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.572418 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.572428 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.572441 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.572450 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.675990 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.676080 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.676099 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.676143 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.676162 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.779097 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.779182 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.779194 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.779210 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.779222 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.882974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.883041 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.883102 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.883127 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.883148 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.985389 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.985430 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.985438 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.985452 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:46 crc kubenswrapper[4869]: I0130 10:54:46.985463 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:46Z","lastTransitionTime":"2026-01-30T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.088761 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.088818 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.088831 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.088848 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.088858 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.124364 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 10:19:31.171234284 +0000 UTC Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.132988 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:47 crc kubenswrapper[4869]: E0130 10:54:47.133183 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.192016 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.192058 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.192070 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.192088 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.192100 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.295645 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.295703 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.295744 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.295769 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.295788 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.398675 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.398775 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.398816 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.398836 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.398847 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.501329 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.501378 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.501393 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.501413 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.501424 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.605190 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.605248 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.605264 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.605285 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.605295 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.708523 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.708584 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.708603 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.708630 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.708650 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.812029 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.812095 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.812107 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.812128 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.812142 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.915164 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.915201 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.915210 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.915229 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:47 crc kubenswrapper[4869]: I0130 10:54:47.915242 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:47Z","lastTransitionTime":"2026-01-30T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.020067 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.020121 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.020133 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.020155 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.020181 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.123928 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.124021 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.124048 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.124084 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.124110 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.124867 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 14:47:46.702325512 +0000 UTC Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.132276 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.132406 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.132291 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:48 crc kubenswrapper[4869]: E0130 10:54:48.132519 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:48 crc kubenswrapper[4869]: E0130 10:54:48.132683 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:54:48 crc kubenswrapper[4869]: E0130 10:54:48.132840 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.226385 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.226431 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.226444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.226464 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.226475 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.328464 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.328502 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.328514 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.328530 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.328542 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.415491 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:48 crc kubenswrapper[4869]: E0130 10:54:48.415617 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:48 crc kubenswrapper[4869]: E0130 10:54:48.415677 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:54:52.415663444 +0000 UTC m=+42.965539510 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.430393 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.430424 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.430433 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.430446 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.430454 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.532771 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.532842 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.532863 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.532894 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.532914 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.635862 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.636307 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.636450 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.636552 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.636637 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.739838 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.739873 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.739883 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.739900 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.739911 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.842972 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.843015 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.843025 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.843042 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.843056 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.946460 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.946513 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.946535 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.946559 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:48 crc kubenswrapper[4869]: I0130 10:54:48.946579 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:48Z","lastTransitionTime":"2026-01-30T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.052886 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.053386 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.053531 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.053625 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.053726 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.125647 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 01:15:03.104648772 +0000 UTC Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.132948 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:49 crc kubenswrapper[4869]: E0130 10:54:49.133086 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.133798 4869 scope.go:117] "RemoveContainer" containerID="888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.158932 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.159073 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.159104 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.159140 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.159150 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.261742 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.261795 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.261807 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.261826 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.261841 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.364000 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.364040 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.364051 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.364098 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.364117 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.449767 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.452369 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.452771 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.467745 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.467810 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.467829 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.467852 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.467866 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.468528 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.482048 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.498956 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.518950 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.534726 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.560302 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.571292 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.571570 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.571683 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.571796 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.571889 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.584731 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.598435 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.612928 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.628560 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.642436 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.659701 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.674925 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.675152 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.675238 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.675320 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.675395 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.680941 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.697409 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.715937 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.742383 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.758867 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:49Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.779121 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.779168 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.779189 4869 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.779221 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.779238 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.883544 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.883614 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.883633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.883661 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.883685 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.988132 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.988189 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.988211 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.988243 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:49 crc kubenswrapper[4869]: I0130 10:54:49.988264 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:49Z","lastTransitionTime":"2026-01-30T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.092752 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.093188 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.093279 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.093352 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.093435 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.125990 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 00:52:53.372082568 +0000 UTC Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.132287 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:50 crc kubenswrapper[4869]: E0130 10:54:50.132850 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.132941 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:50 crc kubenswrapper[4869]: E0130 10:54:50.133097 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.133270 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:50 crc kubenswrapper[4869]: E0130 10:54:50.133562 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.166991 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"s
tartedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326
018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.186012 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.196370 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.196760 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.196865 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.196968 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.197118 4869 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.205847 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.219733 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.248042 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 
2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.275753 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2
4d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.291869 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.300454 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.300504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.300517 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.300539 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.300555 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.308295 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.321861 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.337756 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.356237 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.369402 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.382477 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.393120 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.404272 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 
10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.404321 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.404353 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.404369 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.404380 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.407541 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.421512 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.434458 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:50Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.507703 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.508050 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.508173 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.508313 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.508421 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.612135 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.612227 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.612265 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.612293 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.612312 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.720079 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.720130 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.720141 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.720160 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.720172 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.823804 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.823894 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.823911 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.823936 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.823953 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.927659 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.927772 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.927839 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.927866 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:50 crc kubenswrapper[4869]: I0130 10:54:50.927885 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:50Z","lastTransitionTime":"2026-01-30T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.031146 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.031212 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.031232 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.031262 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.031284 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.126326 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 07:55:12.714265286 +0000 UTC Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.132657 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:51 crc kubenswrapper[4869]: E0130 10:54:51.132843 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.135028 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.135081 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.135118 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.135155 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.135175 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.238387 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.238867 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.238974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.239044 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.239105 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.341659 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.341691 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.341699 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.341731 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.341742 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.445019 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.445129 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.445161 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.445199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.445225 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.548513 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.548896 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.548906 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.548922 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.548932 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.651391 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.651469 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.651485 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.651508 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.651521 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.754075 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.754109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.754118 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.754130 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.754141 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.858854 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.858906 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.858922 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.858945 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.858960 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.962213 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.962287 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.962312 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.962342 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:51 crc kubenswrapper[4869]: I0130 10:54:51.962359 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:51Z","lastTransitionTime":"2026-01-30T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.065579 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.065636 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.065651 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.065675 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.065692 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.126754 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 02:43:59.199171641 +0000 UTC Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.132276 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.132381 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:52 crc kubenswrapper[4869]: E0130 10:54:52.132516 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.132399 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:52 crc kubenswrapper[4869]: E0130 10:54:52.132669 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:52 crc kubenswrapper[4869]: E0130 10:54:52.132775 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.169690 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.169790 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.169810 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.169843 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.169864 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.273281 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.273339 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.273351 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.273376 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.273391 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.376902 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.377046 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.377085 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.377118 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.377139 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.460680 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:54:52 crc kubenswrapper[4869]: E0130 10:54:52.460976 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 10:54:52 crc kubenswrapper[4869]: E0130 10:54:52.461122 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:55:00.461083602 +0000 UTC m=+51.010959708 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.480328 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.480371 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.480384 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.480398 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.480409 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.583633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.583696 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.583751 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.583772 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.583789 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.687057 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.687137 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.687163 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.687199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.687226 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.791610 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.791697 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.791764 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.791801 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.791828 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.895093 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.895154 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.895166 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.895187 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.895200 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.999782 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.999850 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.999862 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:52 crc kubenswrapper[4869]: I0130 10:54:52.999881 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:52.999894 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:52Z","lastTransitionTime":"2026-01-30T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.102856 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.102897 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.102905 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.102920 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.102930 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.127269 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 19:01:45.833049463 +0000 UTC
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.132671 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:54:53 crc kubenswrapper[4869]: E0130 10:54:53.132870 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.206262 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.206307 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.206316 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.206332 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.206343 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.309597 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.309654 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.309667 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.309689 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.309721 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.411996 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.412037 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.412047 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.412065 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.412081 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.514184 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.514218 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.514228 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.514243 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.514254 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.616439 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.616485 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.616501 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.616518 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.616530 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.719049 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.719090 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.719102 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.719121 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.719133 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.821034 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.821086 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.821101 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.821124 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.821139 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.924034 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.924090 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.924099 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.924117 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:53 crc kubenswrapper[4869]: I0130 10:54:53.924130 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:53Z","lastTransitionTime":"2026-01-30T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.027105 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.027162 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.027171 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.027190 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.027201 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.128154 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 03:50:49.569781307 +0000 UTC
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.130597 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.130662 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.130675 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.130697 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.130726 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.132021 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.132121 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.132176 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:54:54 crc kubenswrapper[4869]: E0130 10:54:54.132286 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:54:54 crc kubenswrapper[4869]: E0130 10:54:54.132414 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:54:54 crc kubenswrapper[4869]: E0130 10:54:54.132496 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.233932 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.233983 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.233994 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.234015 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.234030 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.337569 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.337622 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.337634 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.337656 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.337669 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.440365 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.440418 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.440427 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.440446 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.440462 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.543592 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.543644 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.543658 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.543734 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.543750 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.647210 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.647269 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.647282 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.647305 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.647320 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.750856 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.750932 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.750958 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.750990 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.751016 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.854096 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.854146 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.854155 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.854171 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.854181 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.958045 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.958115 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.958125 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.958145 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:54 crc kubenswrapper[4869]: I0130 10:54:54.958156 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:54Z","lastTransitionTime":"2026-01-30T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.062307 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.062395 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.062418 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.062449 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.062467 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.129346 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 00:19:19.810376572 +0000 UTC
Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.132292 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:54:55 crc kubenswrapper[4869]: E0130 10:54:55.132470 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
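---- Editor's note (annotation, not part of the captured log) -----------------
The certificate_manager.go:356 entry above reports an expiry of 2026-02-24
05:53:03 UTC with a rotation deadline of 2026-01-04, already in the past at
this log's clock (2026-01-30); one second later the same line reports a
different deadline, 2025-11-06 (below). The deadline moves because it is
re-drawn with random jitter on every evaluation. A sketch of that calculation
follows; the 70 to 90 percent window is recalled from client-go's certificate
manager and the one-year validity is inferred from the two logged deadlines,
so treat both as assumptions.

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // nextRotationDeadline approximates how client-go's certificate manager
    // picks a rotation deadline: a random point between 70% and 90% of the
    // certificate's validity window (the exact fractions are an assumption,
    // not taken from this log).
    func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        // Expiry taken from the log line above; a one-year validity is an
        // assumption consistent with the two logged deadlines.
        notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
        notBefore := notAfter.AddDate(-1, 0, 0)
        for i := 0; i < 3; i++ {
            fmt.Println("candidate rotation deadline:", nextRotationDeadline(notBefore, notAfter))
        }
    }

Once a drawn deadline is earlier than the current time, as it is here, the
manager starts a rotation attempt.
--------------------------------------------------------------------------------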
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.172307 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.172338 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.172346 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.172361 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.172370 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.276189 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.276242 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.276253 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.276273 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.276288 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.379309 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.379353 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.379364 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.379383 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.379398 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.482908 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.482982 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.483000 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.483028 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.483048 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.586504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.586571 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.586589 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.586620 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.586640 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.690731 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.690781 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.690792 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.690808 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.690822 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.815389 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.815440 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.815450 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.815468 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.815480 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.918088 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.918150 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.918168 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.918185 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:55 crc kubenswrapper[4869]: I0130 10:54:55.918197 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:55Z","lastTransitionTime":"2026-01-30T10:54:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.020985 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.021045 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.021059 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.021076 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.021089 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.123404 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.123451 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.123461 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.123479 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.123491 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.130297 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 21:41:11.884421932 +0000 UTC Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.132244 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.132244 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.132393 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.132463 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.132524 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.132569 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.225778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.225844 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.225856 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.225873 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.225884 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.310514 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.310559 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.310569 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.310588 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.310597 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.330052 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:56Z is after 2025-08-24T17:21:41Z"
Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.335574 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.335607 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
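---- Editor's note (annotation, not part of the captured log) -----------------
The repeated "Error updating node status, will retry" failures (the first ends
just above; further attempts with an identical payload follow below) share one
root cause spelled out at the end of the message: the
node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743/node
serves a certificate that expired on 2025-08-24, more than five months before
this log's clock. A minimal Go sketch for confirming that from the node
follows; it is a hypothetical diagnostic, not part of the kubelet.

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Webhook endpoint taken from the failed Post in the log above.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            InsecureSkipVerify: true, // read the certificate without trusting it
        })
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()

        certs := conn.ConnectionState().PeerCertificates
        if len(certs) == 0 {
            fmt.Println("no peer certificate presented")
            return
        }
        leaf := certs[0]
        fmt.Println("subject:  ", leaf.Subject)
        fmt.Println("notBefore:", leaf.NotBefore)
        fmt.Println("notAfter: ", leaf.NotAfter)
        fmt.Println("expired:  ", time.Now().After(leaf.NotAfter)) // expect true per the x509 error
    }

Until that serving certificate is rotated, every node-status patch bounces off
the webhook, which is why the same payload is retried below.
--------------------------------------------------------------------------------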
event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.335617 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.335633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.335648 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.349661 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:56Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.354277 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.354327 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.354341 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.354361 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.354372 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.370410 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:56Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.374454 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.374512 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.374523 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.374540 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.374553 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.386857 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:56Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.394549 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.394734 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.394748 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.394766 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.394783 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.408525 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:56Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:56 crc kubenswrapper[4869]: E0130 10:54:56.408662 4869 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.410402 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.410453 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.410468 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.410490 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.410504 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.513360 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.513424 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.513451 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.513481 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.513511 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.615778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.615820 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.615830 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.615845 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.615856 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.718602 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.718664 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.718673 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.718689 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.718699 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.822304 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.822403 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.822420 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.822439 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.822450 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.924815 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.924855 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.924864 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.924879 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:56 crc kubenswrapper[4869]: I0130 10:54:56.924888 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:56Z","lastTransitionTime":"2026-01-30T10:54:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.028148 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.028194 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.028205 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.028224 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.028236 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.082883 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.105552 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6
de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.105789 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.122943 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.130451 4869 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 13:39:15.147660433 +0000 UTC Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.131041 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.131074 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.131083 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.131098 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.131113 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.131988 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:57 crc kubenswrapper[4869]: E0130 10:54:57.132214 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.140458 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.161793 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.177047 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.193396 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.204960 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.220589 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.234330 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.234382 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.234392 4869 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.234411 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.234421 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.236282 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.251184 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.264821 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/e
nv\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.283588 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernete
s/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"re
ason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.297763 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.309043 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.321861 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.337458 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.337517 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.337533 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.337561 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.337581 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.337907 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceeb
f2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.358172 4869 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:57Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.440024 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.440074 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.440089 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.440109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.440123 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.542745 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.542795 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.542806 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.542819 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.542828 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.645444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.645487 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.645501 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.645518 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.645530 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.750894 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.751043 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.751059 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.751087 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.751101 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.854470 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.854521 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.854531 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.854547 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.854561 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.957949 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.957995 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.958007 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.958024 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:57 crc kubenswrapper[4869]: I0130 10:54:57.958035 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:57Z","lastTransitionTime":"2026-01-30T10:54:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.060824 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.060892 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.060901 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.060920 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.060931 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.131365 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 20:19:35.728275674 +0000 UTC
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.132799 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.132976 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.133282 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:54:58 crc kubenswrapper[4869]: E0130 10:54:58.133466 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:54:58 crc kubenswrapper[4869]: E0130 10:54:58.133640 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.133762 4869 scope.go:117] "RemoveContainer" containerID="00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac"
Jan 30 10:54:58 crc kubenswrapper[4869]: E0130 10:54:58.133792 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.163430 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.163489 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.163506 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.163531 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.163542 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.266109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.266147 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.266156 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.266172 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.266182 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.369754 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.369796 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.369806 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.369822 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.369832 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.472911 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.472949 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.472958 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.472974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.472984 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.487092 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/1.log" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.489643 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b"} Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.490097 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.505982 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 
10:54:58.527338 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.551774 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.575657 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.575733 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.575746 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.575768 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.575781 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.579610 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.592374 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.606029 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.617389 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.629675 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.643458 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.654189 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.674919 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.678468 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.678504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:58 crc 
kubenswrapper[4869]: I0130 10:54:58.678514 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.678529 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.678540 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.695345 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac
04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.708073 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 
10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.729046 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.744810 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.759982 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.770997 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.780546 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.780593 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.780605 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.780623 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.780637 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.783449 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:58Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.883503 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.883536 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.883543 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.883557 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.883566 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.986319 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.986363 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.986373 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.986390 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:58 crc kubenswrapper[4869]: I0130 10:54:58.986400 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:58Z","lastTransitionTime":"2026-01-30T10:54:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.089111 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.089187 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.089199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.089216 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.089228 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.131799 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 06:21:59.414853561 +0000 UTC Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.131990 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:54:59 crc kubenswrapper[4869]: E0130 10:54:59.132135 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.191748 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.191795 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.191805 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.191824 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.191835 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.294614 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.294672 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.294687 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.294705 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.294740 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.396933 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.396974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.396984 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.397000 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.397013 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.494547 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/2.log" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.495089 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/1.log" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.497435 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerID="e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b" exitCode=1 Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.497493 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.497542 4869 scope.go:117] "RemoveContainer" containerID="00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.498242 4869 scope.go:117] "RemoveContainer" containerID="e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b" Jan 30 10:54:59 crc kubenswrapper[4869]: E0130 10:54:59.498518 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.498665 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.498691 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.498699 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.498727 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.498737 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.511556 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.531319 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.544184 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.556081 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.567159 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.580645 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.601676 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.601770 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc 
kubenswrapper[4869]: I0130 10:54:59.601783 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.601803 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.601814 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.602429 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac
04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch 
crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.615415 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.625947 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.638259 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.649602 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.660266 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.671047 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.680119 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.688655 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.701588 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.704227 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.704269 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.704281 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.704298 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.704309 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.715832 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.729510 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:54:59Z is after 2025-08-24T17:21:41Z" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.807830 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.807863 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.807873 4869 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.807888 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.807901 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.910144 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.910205 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.910219 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.910240 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:54:59 crc kubenswrapper[4869]: I0130 10:54:59.910254 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:54:59Z","lastTransitionTime":"2026-01-30T10:54:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.012740 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.012787 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.012810 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.012828 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.012840 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.115449 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.115493 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.115502 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.115517 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.115527 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.131978 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.132004 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 05:27:57.016562104 +0000 UTC Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.131982 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.131980 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:00 crc kubenswrapper[4869]: E0130 10:55:00.132124 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:00 crc kubenswrapper[4869]: E0130 10:55:00.132247 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:00 crc kubenswrapper[4869]: E0130 10:55:00.132310 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.150336 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.163907 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.176207 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.190305 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.201112 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.212247 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.217450 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.217491 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.217499 4869 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.217515 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.217528 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.223998 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.239169 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.251823 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.263020 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac
2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.280104 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272
e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.291966 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.302502 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.311227 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.319856 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.319916 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.319925 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.319948 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.319973 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.327108 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceeb
f2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.352630 4869 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00de492a0099fbaaac39d7ce7b11cf7f63d7c8780a9a0ac659c2410b3a68cdac\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:42Z\\\",\\\"message\\\":\\\" reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489062 6327 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489232 6327 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.489263 6327 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 10:54:42.491799 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 10:54:42.491928 6327 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0130 10:54:42.491943 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 10:54:42.491971 6327 factory.go:656] Stopping watch factory\\\\nI0130 10:54:42.491999 6327 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0130 10:54:42.504079 6327 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0130 10:54:42.504111 6327 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0130 10:54:42.504172 6327 ovnkube.go:599] Stopped ovnkube\\\\nI0130 10:54:42.504215 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0130 10:54:42.504353 6327 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch 
crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.365562 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.376662 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.422778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.422814 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.422823 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.422838 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.422848 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.502513 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/2.log"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.506242 4869 scope.go:117] "RemoveContainer" containerID="e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b"
Jan 30 10:55:00 crc kubenswrapper[4869]: E0130 10:55:00.506422 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.524927 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.524985 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.525002 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.525024 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.525034 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.530050 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.544041 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z"
Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.557491 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:55:00 crc kubenswrapper[4869]: E0130 10:55:00.557633 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 10:55:00 crc kubenswrapper[4869]: E0130 10:55:00.557698 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:55:16.557679462 +0000 UTC m=+67.107555528 (durationBeforeRetry 16s).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.563368 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f5840
8f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.576363 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.588208 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.599867 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.613470 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.625637 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.627642 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.627784 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.627901 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.627975 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.628041 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.636427 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.649241 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.660747 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.671447 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.681469 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.691805 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.702818 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.714485 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.726109 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.729983 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.730060 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.730075 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.730091 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.730102 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.737798 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:00Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.838909 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 
10:55:00.839257 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.839335 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.839414 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.839495 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.941974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.942326 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.942432 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.942509 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:00 crc kubenswrapper[4869]: I0130 10:55:00.942582 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:00Z","lastTransitionTime":"2026-01-30T10:55:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.044967 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.044999 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.045008 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.045022 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.045030 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.132572 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 17:10:29.538992289 +0000 UTC Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.132731 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:01 crc kubenswrapper[4869]: E0130 10:55:01.132891 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.148075 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.148112 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.148122 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.148140 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.148151 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.251286 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.251594 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.251662 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.251757 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.251835 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.354180 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.354214 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.354224 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.354238 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.354250 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.457744 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.457776 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.457786 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.457802 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.457814 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.560825 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.560872 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.560881 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.560895 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.560904 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.665460 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.665551 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.665571 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.665608 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.665630 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.767748 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.767863 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.767887 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.767909 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.767922 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.871020 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.871096 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.871113 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.871142 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.871159 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.974316 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:55:01 crc kubenswrapper[4869]: E0130 10:55:01.974486 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:55:33.974463938 +0000 UTC m=+84.524340004 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.975145 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:01 crc kubenswrapper[4869]: E0130 10:55:01.975455 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.975670 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:01 crc kubenswrapper[4869]: E0130 10:55:01.975841 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:55:01 crc kubenswrapper[4869]: E0130 10:55:01.975858 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:33.975756445 +0000 UTC m=+84.525632531 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 10:55:01 crc kubenswrapper[4869]: E0130 10:55:01.976434 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:33.976365962 +0000 UTC m=+84.526242128 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.978750 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.978831 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.978849 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.978879 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:01 crc kubenswrapper[4869]: I0130 10:55:01.978903 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:01Z","lastTransitionTime":"2026-01-30T10:55:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.076741 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.076814 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.076946 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.076964 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.076975 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.077017 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:34.077004403 +0000 UTC m=+84.626880469 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.078406 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.078435 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.078445 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.078492 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:55:34.078480325 +0000 UTC m=+84.628356391 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.081468 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.081495 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.081503 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.081516 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.081526 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.132982 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.133136 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.133243 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.133367 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.132983 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:02 crc kubenswrapper[4869]: E0130 10:55:02.133464 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.133520 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 15:47:05.280922966 +0000 UTC Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.184490 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.184546 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.184559 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.184581 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.184597 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.287670 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.288235 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.288315 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.288395 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.288454 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.390458 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.390500 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.390508 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.390522 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.390531 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.493402 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.493436 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.493447 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.493462 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.493472 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.596676 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.596754 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.596764 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.596780 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.596799 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.699513 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.699552 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.699561 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.699576 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.699587 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.802302 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.802386 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.802399 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.802420 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.802432 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.904929 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.904957 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.904965 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.904978 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:02 crc kubenswrapper[4869]: I0130 10:55:02.904988 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:02Z","lastTransitionTime":"2026-01-30T10:55:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.007774 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.007820 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.007832 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.007849 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.007861 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.111251 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.111309 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.111340 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.111364 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.111382 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.132517 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:03 crc kubenswrapper[4869]: E0130 10:55:03.132673 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.133645 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 16:08:28.734400968 +0000 UTC Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.214077 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.214117 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.214125 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.214142 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.214152 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.316297 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.316325 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.316333 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.316348 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.316357 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.418958 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.419001 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.419012 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.419028 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.419037 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.521561 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.521898 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.521975 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.522051 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.522142 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.625056 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.625543 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.625605 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.625668 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.625760 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.728072 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.728131 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.728149 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.728172 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.728189 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.830289 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.830346 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.830356 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.830371 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.830388 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.932925 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.932974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.932987 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.933007 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:03 crc kubenswrapper[4869]: I0130 10:55:03.933019 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:03Z","lastTransitionTime":"2026-01-30T10:55:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.035675 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.035738 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.035765 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.035781 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.035795 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.132235 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.132341 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.132242 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:04 crc kubenswrapper[4869]: E0130 10:55:04.132396 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.135463 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 01:04:27.826478902 +0000 UTC Jan 30 10:55:04 crc kubenswrapper[4869]: E0130 10:55:04.135788 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:04 crc kubenswrapper[4869]: E0130 10:55:04.136041 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.138638 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.138668 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.138678 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.138693 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.138703 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.242165 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.242209 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.242218 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.242234 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.242243 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.345165 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.345206 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.345214 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.345231 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.345242 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.448405 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.448469 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.448486 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.448507 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.448524 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.551207 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.551253 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.551261 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.551277 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.551286 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.654027 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.654067 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.654077 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.654096 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.654106 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.756760 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.756804 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.756813 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.756827 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.756836 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.859016 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.859373 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.859438 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.859509 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.859576 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.910817 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.924108 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:04Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.939252 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:04Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.951476 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:04Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.962318 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.962360 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.962369 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.962385 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.962394 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:04Z","lastTransitionTime":"2026-01-30T10:55:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.969877 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:04Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.983656 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:04Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:04 crc kubenswrapper[4869]: I0130 10:55:04.995232 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:04Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.006760 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.023042 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.040703 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.055066 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.064083 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.064133 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.064146 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.064164 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.064174 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.070695 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.082941 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.095564 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.105765 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.115794 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.124123 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.131983 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.132014 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:05 crc kubenswrapper[4869]: E0130 10:55:05.132271 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.136000 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 04:42:14.979547664 +0000 UTC Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.143963 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:05Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.166549 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.166583 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 
10:55:05.166593 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.166607 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.166616 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.268798 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.269128 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.269193 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.269469 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.269537 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.380388 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.380434 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.380444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.380460 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.380469 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.483157 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.483199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.483208 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.483223 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.483232 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.585870 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.585965 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.585978 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.585994 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.586006 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.688089 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.688130 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.688139 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.688155 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.688165 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.791012 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.791051 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.791062 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.791079 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.791089 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.893594 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.893935 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.894009 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.894076 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.894136 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.996199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.996233 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.996242 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.996255 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:05 crc kubenswrapper[4869]: I0130 10:55:05.996265 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:05Z","lastTransitionTime":"2026-01-30T10:55:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.098133 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.098474 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.098592 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.098673 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.098769 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.132692 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.132747 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.132730 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.132845 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.132924 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.132989 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.136087 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 11:18:17.038847384 +0000 UTC Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.201086 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.201122 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.201131 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.201147 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.201156 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.303800 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.303834 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.303844 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.303863 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.303874 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.406674 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.407021 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.407091 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.407168 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.407241 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.510173 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.510541 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.510634 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.510748 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.510827 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.614013 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.614412 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.614493 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.614562 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.614629 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.647806 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.648084 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.648206 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.648277 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.648339 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.661201 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:06Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.665573 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.665804 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.665973 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.666091 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.666389 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.679973 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:06Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.685852 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.685890 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.685899 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.685914 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.685923 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.696579 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:06Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.699763 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.699908 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.699996 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.700085 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.700235 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.736691 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:06Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.748818 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.748861 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.748872 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.748888 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.748898 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.771375 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:06Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:06 crc kubenswrapper[4869]: E0130 10:55:06.771549 4869 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.773369 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.773504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.773563 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.773631 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.773693 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.876208 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.876567 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.876693 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.876829 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.876924 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.979503 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.979800 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.979868 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.979974 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:06 crc kubenswrapper[4869]: I0130 10:55:06.980040 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:06Z","lastTransitionTime":"2026-01-30T10:55:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.081885 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.081919 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.081942 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.081959 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.081968 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.132598 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:07 crc kubenswrapper[4869]: E0130 10:55:07.132747 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.136658 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 10:14:14.223676573 +0000 UTC Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.184301 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.184327 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.184334 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.184348 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.184358 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.287508 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.287543 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.287554 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.287569 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.287579 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.389878 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.389908 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.389918 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.389931 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.389940 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.492765 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.492803 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.492812 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.492824 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.492835 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.595543 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.595578 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.595589 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.595606 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.595620 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.698365 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.698425 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.698436 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.698450 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.698461 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.801491 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.801540 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.801554 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.801573 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.801584 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.903744 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.903787 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.903797 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.903813 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:07 crc kubenswrapper[4869]: I0130 10:55:07.903823 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:07Z","lastTransitionTime":"2026-01-30T10:55:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.006492 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.006544 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.006557 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.006575 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.006587 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.108484 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.108555 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.108568 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.108585 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.108597 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.132293 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.132303 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.132309 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:08 crc kubenswrapper[4869]: E0130 10:55:08.132702 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:08 crc kubenswrapper[4869]: E0130 10:55:08.132456 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:08 crc kubenswrapper[4869]: E0130 10:55:08.132764 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.137490 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 12:29:47.940033675 +0000 UTC Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.211543 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.211587 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.211598 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.211616 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.211627 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.314604 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.314675 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.314701 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.314788 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.314811 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.417843 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.417885 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.417894 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.417908 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.417918 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.521017 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.521052 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.521062 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.521076 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.521085 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.624082 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.624143 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.624160 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.624183 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.624216 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.727953 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.728046 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.728060 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.728087 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.728105 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.830779 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.831053 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.831136 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.831289 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.831397 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.934487 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.934567 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.934591 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.934627 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:08 crc kubenswrapper[4869]: I0130 10:55:08.934650 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:08Z","lastTransitionTime":"2026-01-30T10:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.038428 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.038496 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.038521 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.038553 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.038577 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.132995 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:09 crc kubenswrapper[4869]: E0130 10:55:09.133205 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.138245 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 20:49:39.466175165 +0000 UTC Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.141651 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.141703 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.141748 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.141766 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.141775 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.244335 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.244378 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.244388 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.244406 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.244417 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.346769 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.347045 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.347160 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.347258 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.347396 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.450861 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.450911 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.450920 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.450933 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.450942 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.553215 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.553248 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.553260 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.553278 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.553288 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.656452 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.656486 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.656541 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.656568 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.656580 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.759046 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.759102 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.759115 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.759139 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.759164 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.862045 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.862647 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.862888 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.863086 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.863270 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.967070 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.967527 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.967948 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.968297 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:09 crc kubenswrapper[4869]: I0130 10:55:09.968620 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:09Z","lastTransitionTime":"2026-01-30T10:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.070607 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.070681 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.070706 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.070800 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.070819 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.132703 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.132802 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.132810 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:10 crc kubenswrapper[4869]: E0130 10:55:10.132942 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:10 crc kubenswrapper[4869]: E0130 10:55:10.133834 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:10 crc kubenswrapper[4869]: E0130 10:55:10.134169 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.138382 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 15:56:32.802078669 +0000 UTC Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.154066 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.169236 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.173721 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.173761 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.173775 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.173793 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.173804 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.182492 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.195973 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.212549 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.228037 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.241928 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.258614 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.273240 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.276497 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.276546 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.276558 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.276577 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.276589 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.297379 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.314425 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.328896 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.337625 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.352206 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.370677 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.379030 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.379102 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.379114 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.379129 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.379139 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.382756 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.395963 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.407540 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:10Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.481434 4869 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.481811 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.481909 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.482036 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.482146 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.584614 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.584646 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.584654 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.584667 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.584675 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.687363 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.687397 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.687406 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.687420 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.687431 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.789464 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.789858 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.789957 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.790058 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.790142 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.892830 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.892898 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.892910 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.892926 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.892936 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.996632 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.996668 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.996680 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.996696 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:10 crc kubenswrapper[4869]: I0130 10:55:10.996728 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:10Z","lastTransitionTime":"2026-01-30T10:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.098878 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.098915 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.098925 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.098982 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.098994 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:11Z","lastTransitionTime":"2026-01-30T10:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.132132 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:11 crc kubenswrapper[4869]: E0130 10:55:11.132254 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.139490 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 00:18:41.512068646 +0000 UTC Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.201834 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.202124 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.202136 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.202153 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.202164 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:11Z","lastTransitionTime":"2026-01-30T10:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.304318 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.304351 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.304364 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.304379 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:11 crc kubenswrapper[4869]: I0130 10:55:11.304390 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:11Z","lastTransitionTime":"2026-01-30T10:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:12 crc kubenswrapper[4869]: I0130 10:55:12.132773 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:55:12 crc kubenswrapper[4869]: I0130 10:55:12.132901 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:55:12 crc kubenswrapper[4869]: I0130 10:55:12.133298 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:55:12 crc kubenswrapper[4869]: E0130 10:55:12.133434 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:55:12 crc kubenswrapper[4869]: E0130 10:55:12.133545 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d"
Jan 30 10:55:12 crc kubenswrapper[4869]: I0130 10:55:12.133725 4869 scope.go:117] "RemoveContainer" containerID="e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b"
Jan 30 10:55:12 crc kubenswrapper[4869]: E0130 10:55:12.133887 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd"
Jan 30 10:55:12 crc kubenswrapper[4869]: E0130 10:55:12.133891 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:55:12 crc kubenswrapper[4869]: I0130 10:55:12.139928 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 04:11:24.591191191 +0000 UTC
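The certificate_manager entries interleaved with these status updates show a rotation deadline that changes on every tick (2025-11-24 above; 2025-12-15 and other dates below) even though the certificate expiration is fixed at 2026-02-24 05:53:03. That is expected: each time it runs, client-go's certificate manager recomputes the deadline as a random, jittered point in the certificate's lifetime. A minimal Go sketch of that behaviour, assuming the 70-90% jitter window used by k8s.io/client-go/util/certificate:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // jitteredRotationDeadline picks a random instant between 70% and 90% of the
    // certificate's lifetime, which is the assumed source of the shifting
    // "rotation deadline is ..." values in the log above.
    func jitteredRotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	return notBefore.Add(time.Duration(float64(total) * (0.7 + 0.2*rand.Float64())))
    }

    func main() {
    	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiry from the log
    	notBefore := notAfter.AddDate(0, 0, -365)                 // assumed issuance; not in the log
    	for i := 0; i < 3; i++ {
    		fmt.Println("rotation deadline is", jitteredRotationDeadline(notBefore, notAfter))
    	}
    }

Because every recomputed deadline here already lies in the past, the kubelet attempts rotation immediately on each tick, which is why the line repeats about once per second.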
Jan 30 10:55:13 crc kubenswrapper[4869]: I0130 10:55:13.132276 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:55:13 crc kubenswrapper[4869]: E0130 10:55:13.132436 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 10:55:13 crc kubenswrapper[4869]: I0130 10:55:13.140604 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 06:36:31.125425338 +0000 UTC
Jan 30 10:55:14 crc kubenswrapper[4869]: I0130 10:55:14.132382 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:55:14 crc kubenswrapper[4869]: I0130 10:55:14.132428 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:55:14 crc kubenswrapper[4869]: I0130 10:55:14.132454 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:55:14 crc kubenswrapper[4869]: E0130 10:55:14.133052 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d"
Jan 30 10:55:14 crc kubenswrapper[4869]: E0130 10:55:14.133157 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:55:14 crc kubenswrapper[4869]: E0130 10:55:14.133309 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:55:14 crc kubenswrapper[4869]: I0130 10:55:14.141767 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 18:46:16.911929143 +0000 UTC
Jan 30 10:55:15 crc kubenswrapper[4869]: I0130 10:55:15.131945 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:55:15 crc kubenswrapper[4869]: E0130 10:55:15.132052 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 10:55:15 crc kubenswrapper[4869]: I0130 10:55:15.142164 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 22:49:17.920138765 +0000 UTC
Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.047105 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.047160 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.047203 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.047222 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.047234 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.132312 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.132393 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.132498 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.132582 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.132669 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.132778 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.143198 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 22:19:08.765677265 +0000 UTC Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.149950 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.149994 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.150006 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.150025 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.150037 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.251958 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.252001 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.252013 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.252027 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.252037 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.354665 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.354720 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.354733 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.354749 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.354761 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.457775 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.458081 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.458199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.458282 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.458346 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.560199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.560235 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.560245 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.560290 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.560304 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.629968 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.630082 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.630416 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:55:48.630400495 +0000 UTC m=+99.180276561 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.662450 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.662475 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.662485 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.662499 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.662511 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.764911 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.764977 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.764991 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.765056 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.765074 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.867113 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.867475 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.867548 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.867615 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.867676 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.881671 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.881700 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.881731 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.881747 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.881757 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.893417 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:16Z is after 2025-08-24T17:21:41Z"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.897534 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.897570 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.897579 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.897594 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.897603 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.908870 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:16Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.917822 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.917863 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.917874 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.917890 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.917902 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.929374 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:16Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.933453 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.933492 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.933503 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.933522 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.933532 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.945582 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:16Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.949032 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.949062 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.949071 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.949085 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.949094 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.959307 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:16Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:16 crc kubenswrapper[4869]: E0130 10:55:16.959413 4869 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.970100 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.970136 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.970147 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.970164 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:16 crc kubenswrapper[4869]: I0130 10:55:16.970174 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:16Z","lastTransitionTime":"2026-01-30T10:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.073122 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.073150 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.073157 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.073172 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.073180 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.132870 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:17 crc kubenswrapper[4869]: E0130 10:55:17.133029 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.144291 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 23:48:10.633992193 +0000 UTC Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.175118 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.175147 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.175155 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.175169 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.175178 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.277641 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.277685 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.277698 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.277735 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.277749 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.380486 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.380515 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.380523 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.380536 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.380545 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.482373 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.482411 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.482420 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.482433 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.482442 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.585373 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.585512 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.585535 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.585570 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.585590 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.687929 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.687982 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.687994 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.688012 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.688024 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.790645 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.790721 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.790734 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.790750 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.790761 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.893176 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.893760 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.893862 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.894009 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.894101 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.997487 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.997548 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.997561 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.997597 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:17 crc kubenswrapper[4869]: I0130 10:55:17.997614 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:17Z","lastTransitionTime":"2026-01-30T10:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.099732 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.099770 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.099781 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.099796 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.099807 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.132241 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.132302 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.132351 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:18 crc kubenswrapper[4869]: E0130 10:55:18.132394 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:18 crc kubenswrapper[4869]: E0130 10:55:18.132527 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:18 crc kubenswrapper[4869]: E0130 10:55:18.132604 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.145290 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 13:36:37.871809325 +0000 UTC Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.202625 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.202667 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.202678 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.202695 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.202730 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.305597 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.305633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.305648 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.305665 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.305676 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.413117 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.413173 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.413186 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.413205 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.413219 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.515636 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.515670 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.515679 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.515696 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.515727 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.567848 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/0.log" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.568172 4869 generic.go:334] "Generic (PLEG): container finished" podID="02f48f89-74aa-48e8-930e-7a86f15de2de" containerID="0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f" exitCode=1 Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.568203 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerDied","Data":"0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.569003 4869 scope.go:117] "RemoveContainer" containerID="0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.584737 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.596983 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.609650 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.618868 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.618907 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.618920 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.618938 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.618950 4869 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.621078 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.633686 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.645388 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.655509 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.669480 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.681564 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.695654 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.706599 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.715093 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.721869 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 
10:55:18.721915 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.721957 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.721981 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.721992 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.730486 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/
net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.750194 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac
04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.763005 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.780051 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.791377 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad
491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.802663 4869 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:18Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.824339 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.824383 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.824395 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.824413 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.824425 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.927699 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.927767 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.927780 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.927799 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:18 crc kubenswrapper[4869]: I0130 10:55:18.927810 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:18Z","lastTransitionTime":"2026-01-30T10:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.031333 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.031399 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.031412 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.031434 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.031445 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.132438 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:19 crc kubenswrapper[4869]: E0130 10:55:19.132581 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.134386 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.134513 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.134558 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.134600 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.134633 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.145560 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 18:33:21.747741037 +0000 UTC Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.237009 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.237039 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.237050 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.237064 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.237073 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.340890 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.340933 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.340942 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.340959 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.340969 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.443385 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.443441 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.443450 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.443467 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.443479 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.546174 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.546211 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.546221 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.546236 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.546245 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.572921 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/0.log" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.572985 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerStarted","Data":"e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.588156 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.602552 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.613400 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.621086 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.634071 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.648447 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.648476 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc 
kubenswrapper[4869]: I0130 10:55:19.648484 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.648497 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.648508 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.650603 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac
04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.661633 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.679126 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.691370 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad
491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.703582 4869 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.715608 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.725102 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.739702 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.750459 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.751568 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.751598 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.751610 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.751625 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.751635 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.763218 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.774846 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.785037 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.798877 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:19Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.854081 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.854129 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.854141 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.854159 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.854170 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.957106 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.957146 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.957157 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.957174 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:19 crc kubenswrapper[4869]: I0130 10:55:19.957185 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:19Z","lastTransitionTime":"2026-01-30T10:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.059351 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.059402 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.059422 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.059444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.059462 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.132155 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.132162 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:20 crc kubenswrapper[4869]: E0130 10:55:20.132595 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.132238 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:20 crc kubenswrapper[4869]: E0130 10:55:20.132990 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:20 crc kubenswrapper[4869]: E0130 10:55:20.132755 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.143862 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.148784 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 22:25:00.465504091 +0000 UTC Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.154065 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.162034 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.162255 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.162322 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.162427 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.162501 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.166022 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.182924 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.192465 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.209006 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.221176 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad
491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.232543 4869 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.242826 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.255276 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.266417 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.267055 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.267109 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.267122 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.267139 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.267151 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.276030 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.288095 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.297922 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.307325 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.319089 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.328482 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.339413 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:20Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.369167 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.369230 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.369241 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.369258 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.369269 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.470918 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.470960 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.470972 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.470989 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.471001 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.572655 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.572697 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.572729 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.572749 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.572758 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.675160 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.675197 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.675209 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.675226 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.675237 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.777327 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.777369 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.777381 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.777399 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.777409 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.879330 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.879368 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.879379 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.879421 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.879433 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.982282 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.982339 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.982350 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.982369 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:20 crc kubenswrapper[4869]: I0130 10:55:20.982380 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:20Z","lastTransitionTime":"2026-01-30T10:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.084810 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.084862 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.084871 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.084891 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.084901 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.132470 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:21 crc kubenswrapper[4869]: E0130 10:55:21.132655 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.149686 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 16:19:27.953604304 +0000 UTC Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.187064 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.187107 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.187119 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.187135 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.187145 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.290073 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.290125 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.290216 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.290253 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.290268 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.393336 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.393366 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.393374 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.393390 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.393400 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.495968 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.496011 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.496020 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.496032 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.496041 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.597989 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.598028 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.598036 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.598057 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.598074 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.700562 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.700599 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.700607 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.700622 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.700631 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.803301 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.803346 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.803358 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.803375 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.803386 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.905256 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.905312 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.905324 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.905342 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:21 crc kubenswrapper[4869]: I0130 10:55:21.905353 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:21Z","lastTransitionTime":"2026-01-30T10:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.007955 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.007993 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.008003 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.008020 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.008029 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.109946 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.109986 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.109999 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.110017 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.110029 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.132401 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.132426 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:22 crc kubenswrapper[4869]: E0130 10:55:22.132543 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.132426 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:22 crc kubenswrapper[4869]: E0130 10:55:22.132626 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:22 crc kubenswrapper[4869]: E0130 10:55:22.132731 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.150468 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 13:27:44.568909045 +0000 UTC Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.212556 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.212604 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.212616 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.212633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.212644 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.315367 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.315409 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.315418 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.315434 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.315444 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.418504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.418548 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.418561 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.418581 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.418594 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.520719 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.520758 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.520769 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.520786 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.520797 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.623368 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.623407 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.623422 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.623438 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.623463 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.725417 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.725461 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.725473 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.725491 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.725506 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.827746 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.827782 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.827791 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.827805 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.827814 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.930031 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.930078 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.930089 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.930106 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:22 crc kubenswrapper[4869]: I0130 10:55:22.930121 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:22Z","lastTransitionTime":"2026-01-30T10:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.032143 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.032181 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.032192 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.032210 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.032221 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.132249 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:23 crc kubenswrapper[4869]: E0130 10:55:23.132395 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.134093 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.134128 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.134136 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.134151 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.134159 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.151582 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 06:50:44.434118793 +0000 UTC Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.236586 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.236619 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.236632 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.236646 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.236657 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.339238 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.339269 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.339278 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.339292 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.339300 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.441425 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.441463 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.441471 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.441486 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.441495 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.544803 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.544848 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.544861 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.544879 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.544892 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.646854 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.646897 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.646907 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.646923 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.646992 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.749566 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.749606 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.749617 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.749632 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.749642 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.853185 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.853219 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.853231 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.853248 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.853259 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.956081 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.956121 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.956134 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.956152 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:23 crc kubenswrapper[4869]: I0130 10:55:23.956163 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:23Z","lastTransitionTime":"2026-01-30T10:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.058184 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.058218 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.058227 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.058242 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.058253 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.132442 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.132502 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.132470 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:24 crc kubenswrapper[4869]: E0130 10:55:24.132621 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:24 crc kubenswrapper[4869]: E0130 10:55:24.132780 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:24 crc kubenswrapper[4869]: E0130 10:55:24.132862 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.151956 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 10:45:14.974715974 +0000 UTC Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.160431 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.160504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.160520 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.160536 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.160546 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.262671 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.262750 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.262764 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.262783 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.262795 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.364842 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.364920 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.364935 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.364951 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.364963 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.467158 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.467198 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.467207 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.467222 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.467232 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.570324 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.570374 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.570387 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.570404 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.570416 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.673402 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.673454 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.673471 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.673491 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.673503 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.776015 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.776072 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.776085 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.776101 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.776113 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.878444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.878492 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.878505 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.878526 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.878539 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.980825 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.980888 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.980905 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.980923 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:24 crc kubenswrapper[4869]: I0130 10:55:24.980938 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:24Z","lastTransitionTime":"2026-01-30T10:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.083827 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.083874 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.083888 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.083903 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.083913 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.132493 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:25 crc kubenswrapper[4869]: E0130 10:55:25.132656 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.152915 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 04:42:51.456982561 +0000 UTC Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.186901 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.186945 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.186956 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.186973 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.186981 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.290616 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.290680 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.290744 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.290778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.290801 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.394031 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.394077 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.394086 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.394101 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.394113 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.496210 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.496253 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.496261 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.496277 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.496287 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.598177 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.598225 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.598239 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.598256 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.598270 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.700593 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.700661 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.700683 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.700757 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.700782 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.803246 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.803289 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.803300 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.803316 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.803328 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.905925 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.905987 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.906007 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.906026 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:25 crc kubenswrapper[4869]: I0130 10:55:25.906051 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:25Z","lastTransitionTime":"2026-01-30T10:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.008004 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.008059 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.008073 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.008090 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.008102 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.110495 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.110554 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.110565 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.110585 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.110597 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.132219 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.132323 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:26 crc kubenswrapper[4869]: E0130 10:55:26.132372 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.132474 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:26 crc kubenswrapper[4869]: E0130 10:55:26.132777 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:26 crc kubenswrapper[4869]: E0130 10:55:26.132897 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.136734 4869 scope.go:117] "RemoveContainer" containerID="e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.153421 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 16:18:51.351477343 +0000 UTC Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.213273 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.213308 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.213319 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.213336 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.213346 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.315929 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.315960 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.315970 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.315985 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.315994 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.418207 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.418250 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.418261 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.418278 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.418288 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.520698 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.520758 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.520769 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.520785 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.520796 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.595741 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/2.log" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.633454 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.633484 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.633494 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.633510 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.633522 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.635688 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.636088 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.652161 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.667750 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.677949 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.691602 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.704150 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.713092 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.723379 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.735153 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.735189 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.735198 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.735212 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.735221 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.735308 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.746816 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.762491 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.774385 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 
10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.794432 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.812270 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.831178 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.837164 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.837209 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.837219 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.837245 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.837256 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.842545 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.857184 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.869395 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.881580 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.939448 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.939493 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.939505 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.939522 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:26 crc kubenswrapper[4869]: I0130 10:55:26.939534 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:26Z","lastTransitionTime":"2026-01-30T10:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.042027 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.042077 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.042096 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.042114 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.042127 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.131968 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.132126 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.145070 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.145098 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.145112 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.145126 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.145137 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.153830 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 20:08:02.154246266 +0000 UTC Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.164247 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.164288 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.164308 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.164326 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.164337 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.178474 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.182734 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.182761 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.182772 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.182786 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.182799 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.204549 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.209136 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.209180 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.209194 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.209213 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.209226 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.225229 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.230142 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.230189 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.230204 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.230222 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.230234 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.242231 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.246461 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.246504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.246514 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.246530 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.246540 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.261310 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.261489 4869 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.263290 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.263346 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.263368 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.263386 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.263399 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.366439 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.366514 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.366537 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.366565 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.366586 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.469403 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.469433 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.469445 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.469459 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.469470 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.571843 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.571884 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.571897 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.571913 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.571922 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.640272 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/3.log" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.640870 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/2.log" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.642971 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" exitCode=1 Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.643010 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.643088 4869 scope.go:117] "RemoveContainer" containerID="e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.643594 4869 scope.go:117] "RemoveContainer" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" Jan 30 10:55:27 crc kubenswrapper[4869]: E0130 10:55:27.643783 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.659239 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.674481 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.674525 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc 
kubenswrapper[4869]: I0130 10:55:27.674536 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.674551 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.674559 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.678280 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca
0405ab85d6b12e552e50cebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1212cc02e8d0682896486989d4dafc75a7c12ac04bd5e24e790e4c739376f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:54:58Z\\\",\\\"message\\\":\\\"et-xd92c for pod on switch crc\\\\nI0130 10:54:58.941553 6557 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999 after 0 failed attempt(s)\\\\nI0130 10:54:58.941561 6557 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999\\\\nI0130 10:54:58.941420 6557 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-2krt6\\\\nI0130 10:54:58.941574 6557 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0130 10:54:58.941580 6557 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-2krt6 in node crc\\\\nI0130 10:54:58.941603 6557 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-2krt6] creating logical port openshift-multus_network-metrics-daemon-2krt6 for pod on switch crc\\\\nF0130 10:54:58.941393 6557 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:26Z\\\",\\\"message\\\":\\\"ling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z]\\\\nI0130 10:55:26.912738 6954 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-4dlfn\\\\nI0130 10:55:26.912691 6954 services_controller.go:434] Service openshift-console-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-console-operator e4559ce3-2d5a-470f-b8bf-4c8b054d2335 11843 0 2025-02-23 05:38:55 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:console-operator] map[capability.openshift.io/name:Console include.release.openshift.io/hypershift:true include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert 
service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00740494f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:55:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.687236 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 
10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.704266 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.716025 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.726807 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.735788 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.745775 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.754638 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.763725 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.774956 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.776478 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.776500 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.776509 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.776523 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.776532 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.785017 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.795135 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.805442 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.814527 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.824641 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.834629 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.844969 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:27Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.878673 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.878733 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.878754 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.878770 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.878779 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.982598 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.983261 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.984120 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.984171 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:27 crc kubenswrapper[4869]: I0130 10:55:27.984190 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:27Z","lastTransitionTime":"2026-01-30T10:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.087331 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.087367 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.087379 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.087397 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.087408 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.132259 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.132306 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.132379 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:28 crc kubenswrapper[4869]: E0130 10:55:28.132457 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:28 crc kubenswrapper[4869]: E0130 10:55:28.132544 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:28 crc kubenswrapper[4869]: E0130 10:55:28.132671 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.154559 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 15:06:25.188392061 +0000 UTC Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.190490 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.190537 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.190549 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.190568 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.190581 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.292803 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.292838 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.292846 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.292860 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.292869 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.395806 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.395836 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.395844 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.395860 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.395869 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.499317 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.499362 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.499370 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.499388 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.499397 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.602363 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.602448 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.602472 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.602504 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.602524 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.649577 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/3.log" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.655033 4869 scope.go:117] "RemoveContainer" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" Jan 30 10:55:28 crc kubenswrapper[4869]: E0130 10:55:28.655307 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.672054 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.684633 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.697514 4869 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.705949 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.705984 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.705993 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.706008 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.706017 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.710065 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.720661 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.734152 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.745375 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.757911 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.770461 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.809442 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.809503 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.809521 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.809544 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.809559 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.809793 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.846851 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.868359 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:26Z\\\",\\\"message\\\":\\\"ling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z]\\\\nI0130 10:55:26.912738 6954 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-4dlfn\\\\nI0130 10:55:26.912691 6954 services_controller.go:434] Service openshift-console-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-console-operator e4559ce3-2d5a-470f-b8bf-4c8b054d2335 11843 0 2025-02-23 05:38:55 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:console-operator] map[capability.openshift.io/name:Console include.release.openshift.io/hypershift:true include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00740494f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:55:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.883064 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.905705 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.911298 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.911340 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.911372 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.911390 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.911402 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:28Z","lastTransitionTime":"2026-01-30T10:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.918871 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.929406 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.937028 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:28 crc kubenswrapper[4869]: I0130 10:55:28.950403 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:28Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.013672 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.013739 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc 
kubenswrapper[4869]: I0130 10:55:29.013752 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.013769 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.013781 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.115971 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.116014 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.116029 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.116063 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.116084 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.132488 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:29 crc kubenswrapper[4869]: E0130 10:55:29.132627 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.154778 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 21:13:14.340206329 +0000 UTC Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.219287 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.219326 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.219336 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.219353 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.219364 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.321944 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.321987 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.322003 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.322020 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.322030 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.424845 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.424890 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.424900 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.424916 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.424925 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.527462 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.527533 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.527545 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.527562 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.527574 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.629876 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.629945 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.629955 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.629969 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.629978 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.731968 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.732019 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.732029 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.732044 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.732053 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.834796 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.834825 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.834832 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.834845 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.834853 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.937140 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.937173 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.937182 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.937195 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:29 crc kubenswrapper[4869]: I0130 10:55:29.937205 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:29Z","lastTransitionTime":"2026-01-30T10:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.039591 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.039629 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.039639 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.039653 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.039663 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.132439 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.132538 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.132719 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:30 crc kubenswrapper[4869]: E0130 10:55:30.132772 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:30 crc kubenswrapper[4869]: E0130 10:55:30.132863 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:30 crc kubenswrapper[4869]: E0130 10:55:30.133519 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.141362 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.141415 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.141426 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.141440 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.141452 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.155813 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 16:00:42.444902908 +0000 UTC Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.171263 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.185941 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.198303 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.219359 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:26Z\\\",\\\"message\\\":\\\"ling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z]\\\\nI0130 10:55:26.912738 6954 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-4dlfn\\\\nI0130 10:55:26.912691 6954 services_controller.go:434] Service openshift-console-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-console-operator e4559ce3-2d5a-470f-b8bf-4c8b054d2335 11843 0 2025-02-23 05:38:55 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:console-operator] map[capability.openshift.io/name:Console include.release.openshift.io/hypershift:true include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00740494f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:55:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.244966 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.245535 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.245578 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.245587 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.245600 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.245610 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.263069 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe227bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.275415 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.286478 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.297395 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.310564 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.321234 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.330642 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.342429 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.347541 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.347620 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.347633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.347651 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.347683 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.353858 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.366448 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.377732 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.387192 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.397817 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:30Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.449944 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.449989 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.450001 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.450017 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.450026 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.552649 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.552732 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.552745 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.552761 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.552769 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.655797 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.655839 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.655849 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.655865 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.655874 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.758678 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.758731 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.758740 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.758753 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.758762 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.861588 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.861679 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.861693 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.861726 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.861762 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.964787 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.964838 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.964850 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.964869 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:30 crc kubenswrapper[4869]: I0130 10:55:30.964884 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:30Z","lastTransitionTime":"2026-01-30T10:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.067877 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.067964 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.067989 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.068018 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.068039 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.132816 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:31 crc kubenswrapper[4869]: E0130 10:55:31.133080 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.156407 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 21:48:08.200060357 +0000 UTC Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.173660 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.174235 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.174253 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.174277 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.174295 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.278179 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.278254 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.278275 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.278301 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.278323 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.381515 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.381579 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.381596 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.381623 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.381645 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.485132 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.485179 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.485191 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.485215 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.485226 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.588119 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.588184 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.588197 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.588220 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.588240 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.690865 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.690919 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.690930 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.690944 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.690955 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.794879 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.794958 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.794971 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.794992 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.795006 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.897347 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.897407 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.897419 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.897439 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.897452 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.999912 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.999964 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:31.999973 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:31 crc kubenswrapper[4869]: I0130 10:55:32.000002 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.000014 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:31Z","lastTransitionTime":"2026-01-30T10:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.103462 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.103563 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.103580 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.103613 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.103632 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.132983 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.133043 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.133131 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:32 crc kubenswrapper[4869]: E0130 10:55:32.133300 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:32 crc kubenswrapper[4869]: E0130 10:55:32.133416 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:32 crc kubenswrapper[4869]: E0130 10:55:32.133507 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.156756 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 20:24:14.750390153 +0000 UTC Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.206936 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.206997 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.207020 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.207050 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.207074 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.309128 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.309157 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.309166 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.309179 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.309192 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.412419 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.412464 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.412475 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.412491 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.412501 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.515535 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.515830 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.515859 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.515910 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.515931 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.618911 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.619005 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.619028 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.619059 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.619083 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.725204 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.725328 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.725371 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.725413 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.725437 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.828876 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.828930 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.828942 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.828957 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.828969 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.931595 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.931645 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.931659 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.931679 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:32 crc kubenswrapper[4869]: I0130 10:55:32.931696 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:32Z","lastTransitionTime":"2026-01-30T10:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.034057 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.034103 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.034127 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.034153 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.034166 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.132365 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:33 crc kubenswrapper[4869]: E0130 10:55:33.132569 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.137000 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.137036 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.137046 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.137061 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.137070 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.157645 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 19:54:02.972071876 +0000 UTC Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.239776 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.239835 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.239847 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.239867 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.239880 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.344835 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.344884 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.344896 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.344916 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.344929 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.448548 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.448608 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.448620 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.448644 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.448655 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.552458 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.552546 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.552570 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.552607 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.552636 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.655571 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.655610 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.655619 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.655633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.655644 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.758144 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.758188 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.758199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.758218 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.758227 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.862570 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.862646 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.862663 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.862693 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.862740 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.967166 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.967229 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.967237 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.967254 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:33 crc kubenswrapper[4869]: I0130 10:55:33.967267 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:33Z","lastTransitionTime":"2026-01-30T10:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.007315 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.007658 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.007579916 +0000 UTC m=+148.557456042 (durationBeforeRetry 1m4s). 
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.007814 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.007882 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.008017 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.008138 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.008169 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.008136682 +0000 UTC m=+148.558012778 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.008241 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.008219984 +0000 UTC m=+148.558096120 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
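
The configmap.go:193 and secret.go:188 failures above both report "object ... not registered": the kubelet serves pod configmaps and secrets through managers that only answer for objects registered to admitted pods, so these mounts keep failing during startup until the pods are (re)registered. A minimal sketch of that guard pattern, with hypothetical names, not kubelet source:

	package main

	import "fmt"

	// objectManager serves only objects that have been registered for a
	// running pod; anything else fails fast with "not registered", the
	// same shape of error seen in the configmap.go/secret.go lines above.
	type objectManager struct {
		registered map[string]string // "namespace/name" -> payload
	}

	func (m *objectManager) Register(namespace, name, payload string) {
		m.registered[namespace+"/"+name] = payload
	}

	func (m *objectManager) Get(namespace, name string) (string, error) {
		payload, ok := m.registered[namespace+"/"+name]
		if !ok {
			return "", fmt.Errorf("object %q/%q not registered", namespace, name)
		}
		return payload, nil
	}

	func main() {
		m := &objectManager{registered: map[string]string{}}
		// Before registration the lookup fails the same way the mount does.
		if _, err := m.Get("openshift-network-console", "networking-console-plugin"); err != nil {
			fmt.Println("MountVolume.SetUp would fail:", err)
		}
		m.Register("openshift-network-console", "networking-console-plugin", "nginx.conf: ...")
		if payload, err := m.Get("openshift-network-console", "networking-console-plugin"); err == nil {
			fmt.Println("after registration:", payload)
		}
	}
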
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.071518 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.071603 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.071624 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.071654 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.071677 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.108890 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.108993 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109264 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109280 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109355 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109380 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109299 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109509 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109482 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.109450438 +0000 UTC m=+148.659326545 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.109653 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.109616073 +0000 UTC m=+148.659492219 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
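
The kube-api-access-* volumes above are projected volumes: the kubelet assembles each one from several sources (a service-account token plus the kube-root-ca.crt configmap and, as these entries show for OpenShift, openshift-service-ca.crt), and projected.go:194 lists every source that failed, since one unresolved source fails the whole volume. A sketch of that aggregation with hypothetical types, not the kubelet's actual API:

	package main

	import (
		"errors"
		"fmt"
	)

	// source is one piece of a projected volume; buildProjectedVolume
	// collects all per-source errors so the final failure names each
	// missing object, as in the projected.go:194 lines above.
	type source struct {
		name    string
		resolve func() (string, error)
	}

	func buildProjectedVolume(sources []source) (map[string]string, error) {
		files := map[string]string{}
		var errs []error
		for _, s := range sources {
			data, err := s.resolve()
			if err != nil {
				errs = append(errs, err)
				continue
			}
			files[s.name] = data
		}
		if len(errs) > 0 {
			return nil, errors.Join(errs...) // any failure aborts the whole volume
		}
		return files, nil
	}

	func main() {
		notRegistered := func(ns, name string) func() (string, error) {
			return func() (string, error) {
				return "", fmt.Errorf("object %q/%q not registered", ns, name)
			}
		}
		_, err := buildProjectedVolume([]source{
			{"token", func() (string, error) { return "eyJhbGci...", nil }},
			{"ca.crt", notRegistered("openshift-network-diagnostics", "kube-root-ca.crt")},
			{"service-ca.crt", notRegistered("openshift-network-diagnostics", "openshift-service-ca.crt")},
		})
		fmt.Println("MountVolume.SetUp failed:", err)
	}
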
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.132943 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.133052 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.133151 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.133053 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.133284 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:55:34 crc kubenswrapper[4869]: E0130 10:55:34.133372 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.158216 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 22:13:55.52305776 +0000 UTC
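
The certificate_manager.go:356 entries throughout this log repeat the same expiration (2026-02-24 05:53:03 UTC) but print a different rotation deadline each time; the deadline is picked at a jittered point of the certificate's validity window (roughly the 70-90% band, treated here as an assumption), so each recomputation lands somewhere new, and deadlines already in the past keep rotation being re-attempted. A sketch of that computation:

	package main

	import (
		"fmt"
		"math/rand"
		"time"
	)

	// nextRotationDeadline picks a point in (roughly) the 70%-90% band of
	// the certificate's validity window; the 0.7 + 0.2*rand formula is an
	// illustrative assumption, not verified client-go source.
	func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
		validity := notAfter.Sub(notBefore)
		jittered := time.Duration(float64(validity) * (0.7 + 0.2*rand.Float64()))
		return notBefore.Add(jittered)
	}

	func main() {
		notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
		notBefore := notAfter.AddDate(-1, 0, 0)                   // assumed one-year validity
		for i := 0; i < 3; i++ {
			// Each call yields a different deadline, which is why the log
			// prints a new rotation deadline on every recomputation.
			fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter))
		}
	}
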
Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.278139 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.278214 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.278233 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.278262 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.278285 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.381206 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.381244 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.381252 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.381266 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.381276 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.483031 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.483094 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.483106 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.483123 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.483134 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.586689 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.586772 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.586782 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.586802 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.586812 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.690095 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.690148 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.690163 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.690186 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.690201 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.794123 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.794241 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.794271 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.794303 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.794327 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.898302 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.898366 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.898379 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.898406 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:34 crc kubenswrapper[4869]: I0130 10:55:34.898425 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:34Z","lastTransitionTime":"2026-01-30T10:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.001338 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.001395 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.001409 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.001433 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.001446 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.105350 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.105419 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.105446 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.105665 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.105699 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.132391 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:35 crc kubenswrapper[4869]: E0130 10:55:35.132643 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.152170 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.158860 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 02:16:06.390051816 +0000 UTC Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.208638 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.208673 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.208683 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.208699 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.208722 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.311734 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.311765 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.311773 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.311787 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.311797 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.414567 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.414612 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.414623 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.414641 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.414655 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.518036 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.518081 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.518092 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.518108 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.518119 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.621300 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.621355 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.621370 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.621389 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.621401 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.723808 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.723875 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.723894 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.723919 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.723938 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.826502 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.826550 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.826567 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.826590 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.826601 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.929477 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.929519 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.929531 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.929548 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:35 crc kubenswrapper[4869]: I0130 10:55:35.929561 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:35Z","lastTransitionTime":"2026-01-30T10:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.031944 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.032001 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.032024 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.032047 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.032061 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.132276 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.132283 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.132305 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:36 crc kubenswrapper[4869]: E0130 10:55:36.132524 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:36 crc kubenswrapper[4869]: E0130 10:55:36.132631 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:36 crc kubenswrapper[4869]: E0130 10:55:36.132751 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.135565 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.135627 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.135646 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.135678 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.135701 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.159836 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 02:14:19.955413452 +0000 UTC Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.238988 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.239101 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.239118 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.239142 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.239156 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.342116 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.342201 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.342238 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.342276 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.342301 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.445906 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.446012 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.446031 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.446094 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.446114 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.550155 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.550247 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.550266 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.550295 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.550314 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.653736 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.653867 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.653889 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.653918 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.653938 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.756899 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.756944 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.756959 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.756978 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.756989 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.859130 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.859177 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.859187 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.859203 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.859213 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.961624 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.961667 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.961694 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.961735 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:36 crc kubenswrapper[4869]: I0130 10:55:36.961750 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:36Z","lastTransitionTime":"2026-01-30T10:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.064040 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.064106 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.064120 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.064136 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.064149 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.132412 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:37 crc kubenswrapper[4869]: E0130 10:55:37.132563 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.160277 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 12:47:15.775623958 +0000 UTC Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.166135 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.166169 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.166182 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.166200 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.166210 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.268803 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.268862 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.268876 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.268895 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.268908 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.371162 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.371192 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.371200 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.371215 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.371223 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.379153 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.379179 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.379188 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.379200 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.379208 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: E0130 10:55:37.390925 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.395369 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.395475 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.395489 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.395509 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.395521 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: E0130 10:55:37.409937 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.414054 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.414091 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.414104 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.414123 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.414135 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: E0130 10:55:37.436526 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.441875 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.441914 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.441924 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.441942 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.441951 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: E0130 10:55:37.459197 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.464456 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.464485 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.464494 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.464509 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.464519 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: E0130 10:55:37.480132 4869 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b98afe0e-307b-429e-9e66-9fd57b577afd\\\",\\\"systemUUID\\\":\\\"901514b4-6dd1-4030-b41f-57ceacfed18b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:37Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:37 crc kubenswrapper[4869]: E0130 10:55:37.480247 4869 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.482236 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
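Both node-status patch attempts above fail identically: the apiserver's call to the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 fails certificate verification because the webhook's serving certificate expired on 2025-08-24 while the node clock reads 2026-01-30, so the kubelet's PATCH comes back as an Internal error until the retry count is exhausted. A quick way to confirm this from the node is to dial the endpoint and read the certificate's validity window directly; the Go sketch below does that. The endpoint, timeout target, and dates come from the log; the program itself is illustrative, and chain verification is deliberately skipped so the expired certificate can still be inspected.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Webhook endpoint taken from the failed Post in the log.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip chain verification on purpose: we want to read the
		// certificate even though it no longer validates.
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert, now := certs[0], time.Now()
	fmt.Println("subject:  ", cert.Subject)
	fmt.Println("notBefore:", cert.NotBefore)
	fmt.Println("notAfter: ", cert.NotAfter)
	// The same window check the TLS stack applies; in the log it fails as
	// "current time 2026-01-30T10:55:37Z is after 2025-08-24T17:21:41Z".
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Println("certificate has expired or is not yet valid")
	}
}
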
event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.482281 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.482294 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.482311 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.482323 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.585609 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.585658 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.585670 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.585689 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.585700 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.688375 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.688446 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.688470 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.688500 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.688522 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.791271 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.791318 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.791330 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.791349 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.791362 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.894932 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.895003 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.895018 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.895047 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.895067 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.998043 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.998107 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.998120 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.998145 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:37 crc kubenswrapper[4869]: I0130 10:55:37.998162 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:37Z","lastTransitionTime":"2026-01-30T10:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.102591 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.102653 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.102670 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.102693 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.102726 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.132299 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.132403 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:38 crc kubenswrapper[4869]: E0130 10:55:38.132501 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.132404 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:38 crc kubenswrapper[4869]: E0130 10:55:38.132602 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:38 crc kubenswrapper[4869]: E0130 10:55:38.132808 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
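Every "Node became not ready" entry above carries the same root cause: the container runtime reports NetworkReady=false because nothing has yet written a CNI config into /etc/kubernetes/cni/net.d/ (on this cluster that is ovnkube-controller's job, and it is crash-looping). The runtime's readiness test is essentially a scan of that directory for a usable config file; the sketch below reproduces that test under the assumption that the usual libcni extensions (.conf, .conflist, .json) apply — it is an illustration, not CRI-O's actual code.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether confDir contains at least one file with a
// CNI-style extension. The extension list follows the common libcni
// convention and is an assumption here.
func hasCNIConfig(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	// Directory taken from the log message.
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	switch {
	case err != nil:
		fmt.Println("cannot read conf dir:", err)
	case !ok:
		// The state this log is stuck in until the network provider starts.
		fmt.Println("NetworkReady=false: no CNI configuration file found")
	default:
		fmt.Println("NetworkReady=true")
	}
}
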
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.160791 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 01:59:21.711004482 +0000 UTC Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.204874 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.204906 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.204916 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.204929 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.204938 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.307755 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.307800 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.307809 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.307825 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.307836 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.411465 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.411552 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.411571 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.411600 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.411624 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.514868 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.514934 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.514947 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.514966 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.514985 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.618880 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.618951 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.618971 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.618998 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.619015 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.721641 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.721696 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.721743 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.721766 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.721781 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.825084 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.825160 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.825180 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.825204 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.825218 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.928898 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.928975 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.928991 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.929020 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:38 crc kubenswrapper[4869]: I0130 10:55:38.929043 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:38Z","lastTransitionTime":"2026-01-30T10:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.033869 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.033942 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.033954 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.033979 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.033999 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.132389 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:39 crc kubenswrapper[4869]: E0130 10:55:39.132674 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
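The certificate_manager.go lines above print a different rotation deadline on every pass (2025-11-17, then 2025-11-13, later 2025-12-26) even though the certificate's expiry never changes. That is expected behaviour: client-go's certificate manager places the deadline at a randomized fraction of the certificate's validity window, roughly 70-90% of the way through it, and re-rolls the jitter after each rotation attempt. The sketch below mimics that; the 0.7/0.2 factors and the one-year lifetime are assumptions for illustration, and only the expiry date is taken from the log.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random instant roughly 70-90% of the way from
// notBefore to notAfter, in the spirit of client-go's jittered deadline.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiry from the log; the one-year lifetime is assumed.
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	for i := 0; i < 3; i++ {
		// Each pass re-rolls, which is why the logged deadline moves.
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}
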
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.133957 4869 scope.go:117] "RemoveContainer" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" Jan 30 10:55:39 crc kubenswrapper[4869]: E0130 10:55:39.134249 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.136877 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.136943 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.136971 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.137002 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.137027 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.161782 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 15:54:20.97547109 +0000 UTC Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.239843 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.239906 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.239918 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.239942 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.239956 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.343618 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.343657 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.343673 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.343694 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.343728 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.447124 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.447187 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.447206 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.447225 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.447237 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.550549 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.550619 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.550650 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.550700 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.550771 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.654510 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.654572 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.654586 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.654612 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.654630 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.757952 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.758047 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.758069 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.758104 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.758151 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.861279 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.861419 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.861444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.861476 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.861499 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.964261 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.964334 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.964356 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.964387 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:39 crc kubenswrapper[4869]: I0130 10:55:39.964409 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:39Z","lastTransitionTime":"2026-01-30T10:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.068071 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.068155 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.068188 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.068221 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.068243 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.132899 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.133266 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.133403 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:40 crc kubenswrapper[4869]: E0130 10:55:40.133688 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
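The CrashLoopBackOff entry above shows ovnkube-controller held in "back-off 40s" — and since that container is the network provider, every NetworkReady=false line in this log hangs off its restart schedule. Kubelet backs off crash-looping containers by doubling the delay from an initial period up to a cap; the 10s initial and 5m cap below are the upstream defaults, assumed rather than read from this cluster's config, and 40s is the third step of that progression.

package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initial  = 10 * time.Second // upstream kubelet default (assumed)
		maxDelay = 5 * time.Minute  // upstream cap (assumed)
	)
	delay := initial
	for i := 1; i <= 7; i++ {
		fmt.Printf("restart %d: back-off %s\n", i, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	// Prints 10s, 20s, 40s, 1m20s, 2m40s, 5m0s, 5m0s - the third restart
	// matches the "back-off 40s" message for ovnkube-node-twvdq.
}
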
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:40 crc kubenswrapper[4869]: E0130 10:55:40.133857 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:40 crc kubenswrapper[4869]: E0130 10:55:40.134077 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.154527 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4dlfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2dfbd43-97e4-4009-96e9-43abfe887630\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28aa0d582edd02bc74d53efcb79dd3610245c565209bb020dd60c16aae771241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bttx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4dlfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.162234 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 12:11:03.981472786 +0000 UTC Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.172400 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.172447 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.172458 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.172475 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.172486 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.179992 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffdd2bb5-688f-4805-80df-8e5dcbf3e5aa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f89fca18fba1006019e787cf063d2e0023cbe0f74e611e435e2ebf7bbd83e57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bd5afa440172d14507d9afecbcc1ff035e1b29533b9a94d96416bbcc6864031\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5228ceebf2db665259f1658a9524867eb4e9a9beef07abd6820b577eb6a6bb25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://875a24678ce56534797f44221532efe7b6de183996b3242e56e79978def3eb71\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://befb890a07c38a871466a4b38a292ceaa7747527621a5ec791674f567a068d55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e526413ab8fdc9a410393181190666acd6672b080acc4d63bc74171ac862ed59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f8d873554b90a6790a0735f527ce7059cf988663920aa1fc2616dcbb3003718\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf454\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fqgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.204251 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e4cac66-8338-46fe-8296-ce9dbd2257bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:26Z\\\",\\\"message\\\":\\\"ling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:26Z is after 2025-08-24T17:21:41Z]\\\\nI0130 10:55:26.912738 6954 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-4dlfn\\\\nI0130 10:55:26.912691 6954 services_controller.go:434] Service openshift-console-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-console-operator e4559ce3-2d5a-470f-b8bf-4c8b054d2335 11843 0 2025-02-23 05:38:55 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:console-operator] map[capability.openshift.io/name:Console include.release.openshift.io/hypershift:true include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00740494f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:55:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wc2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.219460 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23455e82-301e-4eaa-9358-5f00c6840ca7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d40b6dbc7efd9d8c9011700f004948317ab65b7d5c505b4b83f0751d5b8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa7a5f4026112fef4830817779f204590f20cea60b32bd1033ee99040375e03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzkfw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-ww999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.235322 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2005ac0-7df6-450c-838b-b6ba5a72aa2c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad67c4547a877633a356fce068e9eff429db4dfeadb4d94d9cdde139ce31397d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a12ec9c8c077f1a071cf69ee1db019ea7d17d1b453d090f534d5eb5f7f9c36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a12ec9c8c077f1a071cf69ee1db019ea7d17d1b453d090f534d5eb5f7f9c36c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.260037 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d5ad7ff-c955-4a0b-97e8-99a2bb53eb83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948b98708e98f2e36997afd951c916074b57c107416a5f232cb39777aa9e6d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://353af9add62e2f76a52d09bda0a2e6f1f8493bdcb5c4b7bfab5bf4c39d85d73a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cecf2c5618a4e1ddcddd471c51aa200917468082d2640e92d1a0d0677335885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe129cd012019e369fe4079b37d9c0914f9fe2
27bc34d59ae70622b5de21685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bea63351a1e6a318b7845c58b316c5f6d8d5799c7ca6a1ca6ede6cb83edc262f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85d66947c0fe3da6ca1dc2fcb33ec3bb9a1330215501ef4dda260572e43c4c9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f1f038a0dd788326018188c097dcff5036921ddd73071ab02d5c1c3aebe25f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9304633f71861c7012376f3a494219cf5d21eebc3079c2593c3bcf345a0804fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.275663 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.275736 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.275750 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.275774 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.275793 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.278519 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf30ffd7-2194-421d-baa2-f27e0dc8445e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"le observer\\\\nW0130 10:54:29.766012 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0130 10:54:29.766167 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 10:54:29.766954 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2387640870/tls.crt::/tmp/serving-cert-2387640870/tls.key\\\\\\\"\\\\nI0130 10:54:30.067289 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 10:54:30.070519 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 10:54:30.070558 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 10:54:30.070595 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 10:54:30.070641 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 10:54:30.117596 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 10:54:30.117635 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 10:54:30.117648 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 10:54:30.117652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 10:54:30.117656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 10:54:30.117660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 10:54:30.117656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 10:54:30.123055 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.292914 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.306654 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8e24fb9-3d61-4b61-a8c9-644caa0d2278\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50a00a6068bc09d883654f49b1faa6f13857962773f3d0e56c620267bb2a097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://938b0b975373a946b7ffc9967cb2b490d571422eae181d09a4d07ff7b64bf500\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a92e4b4cffdc5dfae23c270617f8959e865d434790f757786253bb0bb531a0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.319156 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef13186b-7f82-4025-97e3-d899be8c207f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b4e724f54458c678e15a85cad9fa91b184b968498ee10031d4b4b6646d4d1c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff409dfab36ab3ced7
fe057b45cb346cac2b07501febafb180be9e50594c8ba4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-99lr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.329579 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2ppc8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d7dd6692-0691-4b9f-8ba4-d76c0e423f0c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27101b9a4d113634832350bc7d3420bf4a12a24d6bac78f56b79d381c6343eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fv9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2ppc8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.340169 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2krt6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35533ad8-7435-413d-bad1-05a0ca183c0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-stx4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:44Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-2krt6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.358051 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c87de6ff467f607b5183317b3540e74e44d34dc85ce48ddd00270ebab86fed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.372913 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.377824 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.377855 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.377864 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.377882 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.377892 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.386599 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1278967f51273a4f114afc774d1c0ede9c1af88f88ec22b92a6bcd5d342db6ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.402824 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5jpbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02f48f89-74aa-48e8-930e-7a86f15de2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T10:55:18Z\\\",\\\"message\\\":\\\"2026-01-30T10:54:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada\\\\n2026-01-30T10:54:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_30a5fbf8-7b5d-41e3-be9e-cb2964793ada to /host/opt/cni/bin/\\\\n2026-01-30T10:54:33Z [verbose] multus-daemon started\\\\n2026-01-30T10:54:33Z [verbose] Readiness Indicator file check\\\\n2026-01-30T10:55:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skz45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5jpbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.415051 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc99b1fe-d998-4cfd-8300-a5e01724e03f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00f64135d423694f9306b48b181b21d252331e3f944bc62a7d1b3e2bb9b43716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d2072b5b8d5ad65a1e4720492420f37fbd11e6769c1f64353def4a5ae88f452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a0c731e448df12bd903b8b254fdc31bd2ce2912ad7ae9fd6a03c405b9dcc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2f81b2eabc5046a91547bd8ec3e9927869f195006ed11e01efca1d4b4223af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T10:54:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T10:54:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T10:54:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.428593 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z" Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.446932 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T10:54:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc9664da121d0a7fece657f8f99a1c7ba5aa79056c2e629cbf8451dfa1cbfcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f7acd1fd042d8cbd93f676677789697d1b8efee2dcfdcfc633272d48c4c959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T10:54:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T10:55:40Z is after 2025-08-24T17:21:41Z"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.479770 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.480198 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.480279 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.480453 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.480522 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.583305 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.583375 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.583387 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.583406 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.583420 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.686724 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.686760 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.686772 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.686786 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.686796 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.788972 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.789021 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.789035 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.789052 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.789064 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.891565 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.891608 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.891620 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.891636 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.891648 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.994748 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.994807 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.994819 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.994844 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:40 crc kubenswrapper[4869]: I0130 10:55:40.994860 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:40Z","lastTransitionTime":"2026-01-30T10:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.098227 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.098344 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.098374 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.098411 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.098438 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.132121 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:55:41 crc kubenswrapper[4869]: E0130 10:55:41.132291 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.163250 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 09:29:13.70353854 +0000 UTC
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.201166 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.201288 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.201305 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.201324 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.201336 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.304557 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.304602 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.304633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.304650 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.304659 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.408143 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.408199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.408212 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.408236 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.408250 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.510608 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.510695 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.510749 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.510781 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.510802 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.613879 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.613961 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.613985 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.614016 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.614040 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.717680 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.717778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.717789 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.717807 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.717817 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.820946 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.821024 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.821044 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.821072 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.821097 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.924846 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.924904 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.924918 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.924939 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:41 crc kubenswrapper[4869]: I0130 10:55:41.924957 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:41Z","lastTransitionTime":"2026-01-30T10:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.027373 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.027439 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.027451 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.027467 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.027478 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.130301 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.130406 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.130423 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.130457 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.130475 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.132607 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.132636 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.132610 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6"
Jan 30 10:55:42 crc kubenswrapper[4869]: E0130 10:55:42.132770 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 10:55:42 crc kubenswrapper[4869]: E0130 10:55:42.132796 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d"
Jan 30 10:55:42 crc kubenswrapper[4869]: E0130 10:55:42.132856 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.164391 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:09:04.825770683 +0000 UTC
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.233801 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.233843 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.233852 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.233868 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.233877 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.336091 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.336214 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.336228 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.336245 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.336256 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.439617 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.439676 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.439688 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.439732 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.439747 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.542464 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.542510 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.542520 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.542533 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.542543 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.646058 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.646105 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.646114 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.646135 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.646147 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.749061 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.749373 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.749453 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.749558 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.749629 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.852276 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.852622 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.852697 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.852824 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.852911 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.956862 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.957242 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.957340 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.957433 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:42 crc kubenswrapper[4869]: I0130 10:55:42.957503 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:42Z","lastTransitionTime":"2026-01-30T10:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.060675 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.060835 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.060871 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.060911 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.060939 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.132616 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:43 crc kubenswrapper[4869]: E0130 10:55:43.133143 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.164101 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.164157 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.164166 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.164181 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.164190 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.164810 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 13:20:16.477392129 +0000 UTC Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.267273 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.267310 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.267318 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.267331 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.267338 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.369640 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.369665 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.369672 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.369685 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.369694 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.471667 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.471735 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.471744 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.471761 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.471794 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.574606 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.574674 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.574686 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.574718 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.574729 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.677498 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.677556 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.677592 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.677625 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.677647 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.780587 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.780635 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.780651 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.780675 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.780690 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.882953 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.883000 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.883018 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.883035 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.883053 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.985771 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.985817 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.985831 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.985850 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:43 crc kubenswrapper[4869]: I0130 10:55:43.985863 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:43Z","lastTransitionTime":"2026-01-30T10:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.088794 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.088863 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.088880 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.088906 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.088926 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.132576 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.132626 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:44 crc kubenswrapper[4869]: E0130 10:55:44.132720 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.132785 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:44 crc kubenswrapper[4869]: E0130 10:55:44.132937 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:44 crc kubenswrapper[4869]: E0130 10:55:44.133096 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.165481 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 14:47:18.256038461 +0000 UTC Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.190734 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.190780 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.190793 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.190810 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.190822 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.293296 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.293342 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.293354 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.293371 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.293383 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.395839 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.395872 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.395881 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.395893 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.395923 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.499049 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.499151 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.499168 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.499189 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.499203 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.602187 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.602222 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.602231 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.602245 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.602255 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.705534 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.705579 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.705594 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.705619 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.705633 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.808547 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.808640 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.808658 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.808689 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.808756 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.912565 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.912645 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.912657 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.912678 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:44 crc kubenswrapper[4869]: I0130 10:55:44.912689 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:44Z","lastTransitionTime":"2026-01-30T10:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.016697 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.016810 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.016835 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.016871 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.016896 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.120025 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.120067 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.120077 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.120093 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.120105 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.132615 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:45 crc kubenswrapper[4869]: E0130 10:55:45.132840 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.166236 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 22:35:50.272115134 +0000 UTC Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.224505 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.224571 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.224593 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.224628 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.224649 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.328742 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.328826 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.328839 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.328860 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.329301 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.433451 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.433524 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.433545 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.433572 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.433643 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.538564 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.539052 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.539064 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.539089 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.539103 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.640935 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.640971 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.640982 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.640999 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.641010 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.743477 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.743536 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.743546 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.743562 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.743589 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.845965 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.846009 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.846020 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.846036 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.846048 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.948150 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.948199 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.948209 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.948224 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:45 crc kubenswrapper[4869]: I0130 10:55:45.948235 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:45Z","lastTransitionTime":"2026-01-30T10:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.050701 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.050752 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.050764 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.050778 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.050790 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.132407 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.132523 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.132563 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:46 crc kubenswrapper[4869]: E0130 10:55:46.132623 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:46 crc kubenswrapper[4869]: E0130 10:55:46.132794 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:46 crc kubenswrapper[4869]: E0130 10:55:46.132833 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.152507 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.152606 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.152623 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.152636 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.152648 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.167100 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 13:15:56.546300727 +0000 UTC Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.254411 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.254436 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.254444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.254458 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.254467 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.357247 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.357285 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.357294 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.357308 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.357316 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.460094 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.460147 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.460159 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.460177 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.460189 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.562758 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.562799 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.562807 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.562821 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.562830 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.665305 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.665335 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.665342 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.665356 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.665374 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.767699 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.767755 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.767772 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.767790 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.767802 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.870382 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.870466 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.870485 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.870518 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.870542 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.972814 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.972857 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.972867 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.972886 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:46 crc kubenswrapper[4869]: I0130 10:55:46.972898 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:46Z","lastTransitionTime":"2026-01-30T10:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.075430 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.075477 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.075488 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.075502 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.075512 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.132772 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:47 crc kubenswrapper[4869]: E0130 10:55:47.133112 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.168122 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 07:15:48.468431299 +0000 UTC Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.177569 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.177600 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.177608 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.177623 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.177634 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.279685 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.279736 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.279746 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.279761 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.279772 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.382464 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.382525 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.382540 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.382561 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.382575 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.484426 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.484482 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.484493 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.484510 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.484523 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.587568 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.587625 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.587633 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.587653 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.587662 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.689685 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.689752 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.689762 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.689776 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.689800 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.791877 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.791963 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.791972 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.791987 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.791996 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.878839 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.878916 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.878928 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.878944 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.878955 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.898367 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.898392 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.898400 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.898430 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.898440 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T10:55:47Z","lastTransitionTime":"2026-01-30T10:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.922594 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8"] Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.923052 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.925339 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.925725 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.926117 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.926139 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.966614 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fb0160e1-515b-4417-ba25-c8d031be5f87-service-ca\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.966681 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fb0160e1-515b-4417-ba25-c8d031be5f87-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.966738 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb0160e1-515b-4417-ba25-c8d031be5f87-serving-cert\") pod 
\"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.966926 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb0160e1-515b-4417-ba25-c8d031be5f87-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.966989 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fb0160e1-515b-4417-ba25-c8d031be5f87-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:47 crc kubenswrapper[4869]: I0130 10:55:47.968200 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-6fqgt" podStartSLOduration=77.968159458 podStartE2EDuration="1m17.968159458s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:47.942670074 +0000 UTC m=+98.492546140" watchObservedRunningTime="2026-01-30 10:55:47.968159458 +0000 UTC m=+98.518035524" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.002248 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=13.002225742 podStartE2EDuration="13.002225742s" podCreationTimestamp="2026-01-30 10:55:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.002207252 +0000 UTC m=+98.552083318" watchObservedRunningTime="2026-01-30 10:55:48.002225742 +0000 UTC m=+98.552101808" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.002366 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-ww999" podStartSLOduration=78.002362526 podStartE2EDuration="1m18.002362526s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:47.983263369 +0000 UTC m=+98.533139455" watchObservedRunningTime="2026-01-30 10:55:48.002362526 +0000 UTC m=+98.552238592" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.031014 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=75.030998252 podStartE2EDuration="1m15.030998252s" podCreationTimestamp="2026-01-30 10:54:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.029753296 +0000 UTC m=+98.579629382" watchObservedRunningTime="2026-01-30 10:55:48.030998252 +0000 UTC m=+98.580874318" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.047363 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.047349939 podStartE2EDuration="1m18.047349939s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.047148843 +0000 UTC m=+98.597024919" watchObservedRunningTime="2026-01-30 10:55:48.047349939 +0000 UTC m=+98.597226005" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.067768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb0160e1-515b-4417-ba25-c8d031be5f87-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.067841 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb0160e1-515b-4417-ba25-c8d031be5f87-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.067884 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fb0160e1-515b-4417-ba25-c8d031be5f87-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.067910 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fb0160e1-515b-4417-ba25-c8d031be5f87-service-ca\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.067933 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fb0160e1-515b-4417-ba25-c8d031be5f87-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.067980 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fb0160e1-515b-4417-ba25-c8d031be5f87-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.068071 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fb0160e1-515b-4417-ba25-c8d031be5f87-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.068770 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fb0160e1-515b-4417-ba25-c8d031be5f87-service-ca\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.075179 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb0160e1-515b-4417-ba25-c8d031be5f87-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.087428 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-4dlfn" podStartSLOduration=78.087406448 podStartE2EDuration="1m18.087406448s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.069273279 +0000 UTC m=+98.619149345" watchObservedRunningTime="2026-01-30 10:55:48.087406448 +0000 UTC m=+98.637282514" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.089274 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb0160e1-515b-4417-ba25-c8d031be5f87-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-j8xh8\" (UID: \"fb0160e1-515b-4417-ba25-c8d031be5f87\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.105453 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=79.105426534 podStartE2EDuration="1m19.105426534s" podCreationTimestamp="2026-01-30 10:54:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.087567033 +0000 UTC m=+98.637443099" watchObservedRunningTime="2026-01-30 10:55:48.105426534 +0000 UTC m=+98.655302600" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.105885 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podStartSLOduration=78.105878037 podStartE2EDuration="1m18.105878037s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.105582998 +0000 UTC m=+98.655459074" watchObservedRunningTime="2026-01-30 10:55:48.105878037 +0000 UTC m=+98.655754103" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.132813 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.132869 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:48 crc kubenswrapper[4869]: E0130 10:55:48.132918 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.132813 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:48 crc kubenswrapper[4869]: E0130 10:55:48.133006 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:48 crc kubenswrapper[4869]: E0130 10:55:48.133104 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.168505 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 06:26:03.57033771 +0000 UTC Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.168564 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.174869 4869 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.187661 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-5jpbv" podStartSLOduration=78.187643333 podStartE2EDuration="1m18.187643333s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.179077873 +0000 UTC m=+98.728953939" watchObservedRunningTime="2026-01-30 10:55:48.187643333 +0000 UTC m=+98.737519399" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.200454 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-2ppc8" podStartSLOduration=78.200435297 podStartE2EDuration="1m18.200435297s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.188000274 +0000 UTC m=+98.737876340" watchObservedRunningTime="2026-01-30 10:55:48.200435297 +0000 UTC m=+98.750311363" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.213345 4869 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.213329973 podStartE2EDuration="51.213329973s" podCreationTimestamp="2026-01-30 10:54:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.200597031 +0000 UTC m=+98.750473097" watchObservedRunningTime="2026-01-30 10:55:48.213329973 +0000 UTC m=+98.763206039" Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.236942 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" Jan 30 10:55:48 crc kubenswrapper[4869]: W0130 10:55:48.249781 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb0160e1_515b_4417_ba25_c8d031be5f87.slice/crio-89bfda2f6ecd3982de7586d1ee810d5daa0021ca68a79be96d849e3ca7411549 WatchSource:0}: Error finding container 89bfda2f6ecd3982de7586d1ee810d5daa0021ca68a79be96d849e3ca7411549: Status 404 returned error can't find the container with id 89bfda2f6ecd3982de7586d1ee810d5daa0021ca68a79be96d849e3ca7411549 Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.672158 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:48 crc kubenswrapper[4869]: E0130 10:55:48.672361 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:55:48 crc kubenswrapper[4869]: E0130 10:55:48.672622 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs podName:35533ad8-7435-413d-bad1-05a0ca183c0d nodeName:}" failed. No retries permitted until 2026-01-30 10:56:52.672606406 +0000 UTC m=+163.222482472 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs") pod "network-metrics-daemon-2krt6" (UID: "35533ad8-7435-413d-bad1-05a0ca183c0d") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.722570 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" event={"ID":"fb0160e1-515b-4417-ba25-c8d031be5f87","Type":"ContainerStarted","Data":"a7c249828956205db67f681c2e13d37888ee228858fb666de58c508346c42f89"} Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.722620 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" event={"ID":"fb0160e1-515b-4417-ba25-c8d031be5f87","Type":"ContainerStarted","Data":"89bfda2f6ecd3982de7586d1ee810d5daa0021ca68a79be96d849e3ca7411549"} Jan 30 10:55:48 crc kubenswrapper[4869]: I0130 10:55:48.741975 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j8xh8" podStartSLOduration=78.74193609 podStartE2EDuration="1m18.74193609s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:55:48.741872888 +0000 UTC m=+99.291748964" watchObservedRunningTime="2026-01-30 10:55:48.74193609 +0000 UTC m=+99.291812196" Jan 30 10:55:49 crc kubenswrapper[4869]: I0130 10:55:49.132019 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:49 crc kubenswrapper[4869]: E0130 10:55:49.132176 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:50 crc kubenswrapper[4869]: I0130 10:55:50.132939 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:50 crc kubenswrapper[4869]: E0130 10:55:50.133796 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:50 crc kubenswrapper[4869]: I0130 10:55:50.133973 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:50 crc kubenswrapper[4869]: I0130 10:55:50.133985 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:50 crc kubenswrapper[4869]: E0130 10:55:50.134029 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:50 crc kubenswrapper[4869]: E0130 10:55:50.134130 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:51 crc kubenswrapper[4869]: I0130 10:55:51.132238 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:51 crc kubenswrapper[4869]: E0130 10:55:51.132676 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:52 crc kubenswrapper[4869]: I0130 10:55:52.132191 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:52 crc kubenswrapper[4869]: I0130 10:55:52.132301 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:52 crc kubenswrapper[4869]: E0130 10:55:52.132324 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:52 crc kubenswrapper[4869]: I0130 10:55:52.132216 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:52 crc kubenswrapper[4869]: E0130 10:55:52.132469 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:52 crc kubenswrapper[4869]: E0130 10:55:52.132562 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:52 crc kubenswrapper[4869]: I0130 10:55:52.133752 4869 scope.go:117] "RemoveContainer" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" Jan 30 10:55:52 crc kubenswrapper[4869]: E0130 10:55:52.133981 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" Jan 30 10:55:53 crc kubenswrapper[4869]: I0130 10:55:53.132768 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:53 crc kubenswrapper[4869]: E0130 10:55:53.132934 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:54 crc kubenswrapper[4869]: I0130 10:55:54.132215 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:54 crc kubenswrapper[4869]: I0130 10:55:54.132264 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:54 crc kubenswrapper[4869]: I0130 10:55:54.132264 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:54 crc kubenswrapper[4869]: E0130 10:55:54.132436 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:54 crc kubenswrapper[4869]: E0130 10:55:54.132554 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:54 crc kubenswrapper[4869]: E0130 10:55:54.132630 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:55 crc kubenswrapper[4869]: I0130 10:55:55.132361 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:55 crc kubenswrapper[4869]: E0130 10:55:55.132477 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:56 crc kubenswrapper[4869]: I0130 10:55:56.132869 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:56 crc kubenswrapper[4869]: I0130 10:55:56.132959 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:56 crc kubenswrapper[4869]: I0130 10:55:56.133026 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:56 crc kubenswrapper[4869]: E0130 10:55:56.133109 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:56 crc kubenswrapper[4869]: E0130 10:55:56.133323 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:56 crc kubenswrapper[4869]: E0130 10:55:56.133488 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:57 crc kubenswrapper[4869]: I0130 10:55:57.132780 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:57 crc kubenswrapper[4869]: E0130 10:55:57.132922 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:55:58 crc kubenswrapper[4869]: I0130 10:55:58.132744 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:55:58 crc kubenswrapper[4869]: I0130 10:55:58.132843 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:55:58 crc kubenswrapper[4869]: E0130 10:55:58.132911 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:55:58 crc kubenswrapper[4869]: I0130 10:55:58.132932 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:55:58 crc kubenswrapper[4869]: E0130 10:55:58.132996 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:55:58 crc kubenswrapper[4869]: E0130 10:55:58.133041 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:55:59 crc kubenswrapper[4869]: I0130 10:55:59.132471 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:55:59 crc kubenswrapper[4869]: E0130 10:55:59.132582 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:00 crc kubenswrapper[4869]: I0130 10:56:00.132331 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:00 crc kubenswrapper[4869]: I0130 10:56:00.133563 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:00 crc kubenswrapper[4869]: E0130 10:56:00.133676 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:00 crc kubenswrapper[4869]: I0130 10:56:00.133741 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:00 crc kubenswrapper[4869]: E0130 10:56:00.133863 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:00 crc kubenswrapper[4869]: E0130 10:56:00.133917 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:01 crc kubenswrapper[4869]: I0130 10:56:01.132467 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:01 crc kubenswrapper[4869]: E0130 10:56:01.132603 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:02 crc kubenswrapper[4869]: I0130 10:56:02.133009 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:02 crc kubenswrapper[4869]: I0130 10:56:02.133108 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:02 crc kubenswrapper[4869]: E0130 10:56:02.133139 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:02 crc kubenswrapper[4869]: I0130 10:56:02.133171 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:02 crc kubenswrapper[4869]: E0130 10:56:02.133274 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:02 crc kubenswrapper[4869]: E0130 10:56:02.133363 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:03 crc kubenswrapper[4869]: I0130 10:56:03.132867 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:03 crc kubenswrapper[4869]: E0130 10:56:03.133832 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.133000 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.133794 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.134134 4869 scope.go:117] "RemoveContainer" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.134333 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:04 crc kubenswrapper[4869]: E0130 10:56:04.134381 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:04 crc kubenswrapper[4869]: E0130 10:56:04.134526 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:04 crc kubenswrapper[4869]: E0130 10:56:04.134679 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:04 crc kubenswrapper[4869]: E0130 10:56:04.134941 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-twvdq_openshift-ovn-kubernetes(3e4cac66-8338-46fe-8296-ce9dbd2257bd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.769488 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/1.log" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.769966 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/0.log" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.770011 4869 generic.go:334] "Generic (PLEG): container finished" podID="02f48f89-74aa-48e8-930e-7a86f15de2de" containerID="e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024" exitCode=1 Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.770044 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerDied","Data":"e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024"} Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.770075 4869 scope.go:117] "RemoveContainer" containerID="0e1ef63fadb9c8e6a7808f931483e74c674ac6ba59fd6c4b7cd82a78a601346f" Jan 30 10:56:04 crc kubenswrapper[4869]: I0130 10:56:04.770523 4869 scope.go:117] "RemoveContainer" containerID="e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024" Jan 30 10:56:04 crc kubenswrapper[4869]: E0130 10:56:04.770740 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-5jpbv_openshift-multus(02f48f89-74aa-48e8-930e-7a86f15de2de)\"" pod="openshift-multus/multus-5jpbv" podUID="02f48f89-74aa-48e8-930e-7a86f15de2de" Jan 30 10:56:05 crc kubenswrapper[4869]: I0130 10:56:05.132401 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:05 crc kubenswrapper[4869]: E0130 10:56:05.132555 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
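Editor's note: the two CrashLoopBackOff entries above show the kubelet's doubling restart back-off. kube-multus is at "back-off 10s" (first delay) while ovnkube-controller is already at "back-off 40s" (third delay: 10s, 20s, 40s). A small sketch of that schedule, assuming the well-known kubelet defaults of a 10s base doubling to a 5m cap (the helper is ours, not kubelet code):

    // backoff.go - sketch of the crash-loop restart delays seen above.
    package main

    import (
    	"fmt"
    	"time"
    )

    // backoff returns the delay before restart attempt n (0-based),
    // doubling from a 10s base and capping at 5m (assumed kubelet defaults).
    func backoff(n int) time.Duration {
    	d := 10 * time.Second
    	for i := 0; i < n; i++ {
    		d *= 2
    		if d >= 5*time.Minute {
    			return 5 * time.Minute
    		}
    	}
    	return d
    }

    func main() {
    	for n := 0; n < 6; n++ {
    		fmt.Printf("restart %d: back-off %s\n", n, backoff(n))
    	}
    	// prints: 10s, 20s, 40s, 1m20s, 2m40s, 5m0s
    }

The ovnkube-controller container does come back at 10:56:16 below, once its 40s back-off window expires.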
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:05 crc kubenswrapper[4869]: I0130 10:56:05.774337 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/1.log" Jan 30 10:56:06 crc kubenswrapper[4869]: I0130 10:56:06.132689 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:06 crc kubenswrapper[4869]: I0130 10:56:06.132767 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:06 crc kubenswrapper[4869]: E0130 10:56:06.132915 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:06 crc kubenswrapper[4869]: I0130 10:56:06.133069 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:06 crc kubenswrapper[4869]: E0130 10:56:06.133266 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:06 crc kubenswrapper[4869]: E0130 10:56:06.133381 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:07 crc kubenswrapper[4869]: I0130 10:56:07.132600 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:07 crc kubenswrapper[4869]: E0130 10:56:07.132811 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:08 crc kubenswrapper[4869]: I0130 10:56:08.132530 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:08 crc kubenswrapper[4869]: I0130 10:56:08.132621 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:08 crc kubenswrapper[4869]: I0130 10:56:08.132576 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:08 crc kubenswrapper[4869]: E0130 10:56:08.132810 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:08 crc kubenswrapper[4869]: E0130 10:56:08.132929 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:08 crc kubenswrapper[4869]: E0130 10:56:08.133057 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:09 crc kubenswrapper[4869]: I0130 10:56:09.132017 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:09 crc kubenswrapper[4869]: E0130 10:56:09.132219 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:10 crc kubenswrapper[4869]: I0130 10:56:10.132855 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:10 crc kubenswrapper[4869]: I0130 10:56:10.132876 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:10 crc kubenswrapper[4869]: I0130 10:56:10.132919 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:10 crc kubenswrapper[4869]: E0130 10:56:10.133832 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:10 crc kubenswrapper[4869]: E0130 10:56:10.133934 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:10 crc kubenswrapper[4869]: E0130 10:56:10.134076 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:10 crc kubenswrapper[4869]: E0130 10:56:10.178674 4869 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 30 10:56:10 crc kubenswrapper[4869]: E0130 10:56:10.235797 4869 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 10:56:11 crc kubenswrapper[4869]: I0130 10:56:11.132893 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:11 crc kubenswrapper[4869]: E0130 10:56:11.133056 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:12 crc kubenswrapper[4869]: I0130 10:56:12.132873 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:12 crc kubenswrapper[4869]: I0130 10:56:12.132912 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:12 crc kubenswrapper[4869]: I0130 10:56:12.132883 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:12 crc kubenswrapper[4869]: E0130 10:56:12.133039 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:12 crc kubenswrapper[4869]: E0130 10:56:12.133146 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:12 crc kubenswrapper[4869]: E0130 10:56:12.133266 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:13 crc kubenswrapper[4869]: I0130 10:56:13.132100 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:13 crc kubenswrapper[4869]: E0130 10:56:13.132268 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:14 crc kubenswrapper[4869]: I0130 10:56:14.132791 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:14 crc kubenswrapper[4869]: I0130 10:56:14.132869 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:14 crc kubenswrapper[4869]: E0130 10:56:14.132945 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:14 crc kubenswrapper[4869]: I0130 10:56:14.132868 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:14 crc kubenswrapper[4869]: E0130 10:56:14.133099 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:14 crc kubenswrapper[4869]: E0130 10:56:14.133249 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:15 crc kubenswrapper[4869]: I0130 10:56:15.132490 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:15 crc kubenswrapper[4869]: E0130 10:56:15.132962 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:15 crc kubenswrapper[4869]: E0130 10:56:15.237449 4869 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.132621 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.132691 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:16 crc kubenswrapper[4869]: E0130 10:56:16.132807 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.132686 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:16 crc kubenswrapper[4869]: E0130 10:56:16.133225 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:16 crc kubenswrapper[4869]: E0130 10:56:16.132889 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.133693 4869 scope.go:117] "RemoveContainer" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.812069 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/3.log" Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.814934 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerStarted","Data":"0714f38753af86dcac165d4d7e8e420136a54d01e7cceb1047f66d3da6caaba9"} Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.815396 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:56:16 crc kubenswrapper[4869]: I0130 10:56:16.842653 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podStartSLOduration=106.842637782 podStartE2EDuration="1m46.842637782s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:16.842472538 +0000 UTC m=+127.392348604" watchObservedRunningTime="2026-01-30 10:56:16.842637782 +0000 UTC m=+127.392513848" Jan 30 10:56:17 crc kubenswrapper[4869]: I0130 10:56:17.064141 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2krt6"] Jan 30 10:56:17 crc kubenswrapper[4869]: I0130 10:56:17.064249 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:17 crc kubenswrapper[4869]: E0130 10:56:17.064404 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:17 crc kubenswrapper[4869]: I0130 10:56:17.132702 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:17 crc kubenswrapper[4869]: E0130 10:56:17.132832 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:18 crc kubenswrapper[4869]: I0130 10:56:18.131994 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:18 crc kubenswrapper[4869]: I0130 10:56:18.131994 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:18 crc kubenswrapper[4869]: E0130 10:56:18.132388 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:18 crc kubenswrapper[4869]: E0130 10:56:18.132449 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:19 crc kubenswrapper[4869]: I0130 10:56:19.132836 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:19 crc kubenswrapper[4869]: I0130 10:56:19.132881 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:19 crc kubenswrapper[4869]: E0130 10:56:19.132997 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:19 crc kubenswrapper[4869]: E0130 10:56:19.133078 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:20 crc kubenswrapper[4869]: I0130 10:56:20.132128 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:20 crc kubenswrapper[4869]: E0130 10:56:20.133309 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:20 crc kubenswrapper[4869]: I0130 10:56:20.133338 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:20 crc kubenswrapper[4869]: E0130 10:56:20.133508 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:20 crc kubenswrapper[4869]: I0130 10:56:20.133623 4869 scope.go:117] "RemoveContainer" containerID="e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024" Jan 30 10:56:20 crc kubenswrapper[4869]: E0130 10:56:20.239187 4869 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 10:56:20 crc kubenswrapper[4869]: I0130 10:56:20.829050 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/1.log" Jan 30 10:56:20 crc kubenswrapper[4869]: I0130 10:56:20.829104 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerStarted","Data":"789b440a13044433df32646c52cdc72df74090c4be9a71b1135371073ef0683d"} Jan 30 10:56:21 crc kubenswrapper[4869]: I0130 10:56:21.132621 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:21 crc kubenswrapper[4869]: I0130 10:56:21.132641 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:21 crc kubenswrapper[4869]: E0130 10:56:21.132761 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:21 crc kubenswrapper[4869]: E0130 10:56:21.132807 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:22 crc kubenswrapper[4869]: I0130 10:56:22.134817 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:22 crc kubenswrapper[4869]: I0130 10:56:22.134836 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:22 crc kubenswrapper[4869]: E0130 10:56:22.135466 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:22 crc kubenswrapper[4869]: E0130 10:56:22.135611 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:23 crc kubenswrapper[4869]: I0130 10:56:23.132191 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:23 crc kubenswrapper[4869]: E0130 10:56:23.132534 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:23 crc kubenswrapper[4869]: I0130 10:56:23.132240 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:23 crc kubenswrapper[4869]: E0130 10:56:23.132701 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:24 crc kubenswrapper[4869]: I0130 10:56:24.132392 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:24 crc kubenswrapper[4869]: I0130 10:56:24.132484 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:24 crc kubenswrapper[4869]: E0130 10:56:24.132509 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 10:56:24 crc kubenswrapper[4869]: E0130 10:56:24.132637 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 10:56:25 crc kubenswrapper[4869]: I0130 10:56:25.132897 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:25 crc kubenswrapper[4869]: E0130 10:56:25.133174 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2krt6" podUID="35533ad8-7435-413d-bad1-05a0ca183c0d" Jan 30 10:56:25 crc kubenswrapper[4869]: I0130 10:56:25.133939 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:25 crc kubenswrapper[4869]: E0130 10:56:25.134265 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 10:56:26 crc kubenswrapper[4869]: I0130 10:56:26.133021 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:26 crc kubenswrapper[4869]: I0130 10:56:26.133258 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:26 crc kubenswrapper[4869]: I0130 10:56:26.137549 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 30 10:56:26 crc kubenswrapper[4869]: I0130 10:56:26.137850 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 30 10:56:27 crc kubenswrapper[4869]: I0130 10:56:27.132933 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:27 crc kubenswrapper[4869]: I0130 10:56:27.132932 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:27 crc kubenswrapper[4869]: I0130 10:56:27.135456 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 30 10:56:27 crc kubenswrapper[4869]: I0130 10:56:27.136183 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 30 10:56:27 crc kubenswrapper[4869]: I0130 10:56:27.136211 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 30 10:56:27 crc kubenswrapper[4869]: I0130 10:56:27.136427 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.785945 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.836054 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tnth8"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.836876 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.841237 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.843700 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.843806 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.846261 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tt6p5"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.851750 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.852857 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.866079 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8khhj"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.866412 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.866883 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.867242 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.867342 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.867363 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.867802 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.867971 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.868306 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.868633 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.868605 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.868757 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xdsnd"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.869021 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.869092 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.869025 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.869372 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.870835 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.872482 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.873445 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.873875 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dqfj8"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.874497 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-cdspc"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.874693 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.875405 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.877606 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.877797 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.877971 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.878043 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.878158 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.878015 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.878307 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.878774 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.879459 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.879770 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.879995 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.880247 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.882979 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.884423 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.888676 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dznqv"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.889375 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pr488"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.897654 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.901273 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.901786 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.902002 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.903448 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.911320 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.911667 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.911870 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-g46p9"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912304 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-z8qjp"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912371 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912515 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912618 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912642 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912813 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912910 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913036 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913042 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.912567 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913131 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913208 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913244 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-g46p9" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913254 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913351 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913416 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.913864 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.914020 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tnth8"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.914047 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.914116 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.930516 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.935676 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-dvjw7"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.936336 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.937076 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.941631 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.944049 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.944612 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.944993 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.951264 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.951661 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.951953 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.952136 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.952408 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.952968 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.953339 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.953414 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.953497 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.960060 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.960539 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.960957 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.961103 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.953532 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.953576 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.953598 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.945019 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.953678 4869 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.962652 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.962810 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.962874 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.962929 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.962932 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963006 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963050 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.962809 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963221 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963487 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963624 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963645 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963674 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.963857 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964013 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964110 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964199 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964223 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964409 4869 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964530 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-55vfj"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964602 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964832 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.968330 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.968655 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.968922 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964954 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.965184 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964904 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.965573 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964928 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.970440 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.974117 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g9lcg"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.974928 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.964979 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.975253 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.975868 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.976132 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.976644 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.976854 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985072 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-serving-cert\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985121 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8btpx\" (UniqueName: \"kubernetes.io/projected/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-kube-api-access-8btpx\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985144 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-etcd-client\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985167 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cc1509a-74f3-4f56-9742-d8c9e57359d4-serving-cert\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985189 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5637ae84-f53a-48d5-87c6-1de13d92c181-trusted-ca\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985226 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/92b8f00c-4b15-49a1-ac91-aef68b07cb74-metrics-tls\") pod \"dns-operator-744455d44c-8khhj\" (UID: \"92b8f00c-4b15-49a1-ac91-aef68b07cb74\") " pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985247 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-client-ca\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985272 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-service-ca-bundle\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985294 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-audit-dir\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985319 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq9p7\" (UniqueName: \"kubernetes.io/projected/4a057949-f846-40b0-bc8e-66c8c8d0d1d4-kube-api-access-vq9p7\") pod \"cluster-samples-operator-665b6dd947-jkrt8\" (UID: \"4a057949-f846-40b0-bc8e-66c8c8d0d1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985338 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plwrm\" (UniqueName: \"kubernetes.io/projected/92b8f00c-4b15-49a1-ac91-aef68b07cb74-kube-api-access-plwrm\") pod \"dns-operator-744455d44c-8khhj\" (UID: \"92b8f00c-4b15-49a1-ac91-aef68b07cb74\") " pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985366 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985384 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpdc2\" (UniqueName: \"kubernetes.io/projected/3cc1509a-74f3-4f56-9742-d8c9e57359d4-kube-api-access-vpdc2\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985401 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr2ml\" (UniqueName: \"kubernetes.io/projected/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-kube-api-access-zr2ml\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985546 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-gjwz9\" (UniqueName: \"kubernetes.io/projected/740c30a1-6a0c-479f-9f11-62c969da6044-kube-api-access-gjwz9\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985618 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-config\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985658 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5637ae84-f53a-48d5-87c6-1de13d92c181-serving-cert\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985690 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d256fa3-7889-415c-8e01-0b43802365a5-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985756 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc978c96-efc2-4963-af6b-bd987cb81bed-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985792 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985822 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985852 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc978c96-efc2-4963-af6b-bd987cb81bed-serving-cert\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985884 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-config\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985924 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a057949-f846-40b0-bc8e-66c8c8d0d1d4-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jkrt8\" (UID: \"4a057949-f846-40b0-bc8e-66c8c8d0d1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.985970 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986017 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3de467a9-b50e-4af7-816d-c346960a39af-serving-cert\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986051 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5637ae84-f53a-48d5-87c6-1de13d92c181-config\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986079 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-config\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986106 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-images\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986135 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d256fa3-7889-415c-8e01-0b43802365a5-config\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986188 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qrd6\" (UniqueName: \"kubernetes.io/projected/bc978c96-efc2-4963-af6b-bd987cb81bed-kube-api-access-4qrd6\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986216 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986260 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/740c30a1-6a0c-479f-9f11-62c969da6044-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wbpq\" (UniqueName: \"kubernetes.io/projected/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-kube-api-access-5wbpq\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986417 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d256fa3-7889-415c-8e01-0b43802365a5-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986456 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986488 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-audit-policies\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986518 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-encryption-config\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986570 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd5xr\" (UniqueName: \"kubernetes.io/projected/3de467a9-b50e-4af7-816d-c346960a39af-kube-api-access-pd5xr\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986612 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p252w\" (UniqueName: \"kubernetes.io/projected/5637ae84-f53a-48d5-87c6-1de13d92c181-kube-api-access-p252w\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.986638 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/740c30a1-6a0c-479f-9f11-62c969da6044-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.989536 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.989645 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.989536 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.989929 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990088 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990178 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990193 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990307 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990437 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990608 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 30 10:56:28 
crc kubenswrapper[4869]: I0130 10:56:28.990686 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990687 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.990875 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.991029 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.993761 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rkwmf"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.994463 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4"] Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.994967 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.994967 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:28 crc kubenswrapper[4869]: I0130 10:56:28.996960 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.000359 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.000769 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.006977 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.010401 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.011352 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.013598 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.014988 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.017044 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.017435 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.029311 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" 
Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.030278 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.030270 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.031913 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.031988 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.032295 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.032414 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.032571 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.033235 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.033478 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.034169 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.034434 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-br4ps"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.035019 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.036048 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.036903 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.037002 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-k5cpz"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.037643 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.037987 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tt6p5"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.038826 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.039031 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8khhj"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.040102 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.041360 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.042680 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dqfj8"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.043835 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dznqv"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.044817 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pr488"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.045945 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.047462 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.051814 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xdsnd"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.057763 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.058895 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.062663 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.063224 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z8qjp"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.064754 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.065909 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-cdspc"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.068808 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.071444 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.074077 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.079107 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.080706 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.083412 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-g46p9"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.085152 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087365 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qrd6\" (UniqueName: \"kubernetes.io/projected/bc978c96-efc2-4963-af6b-bd987cb81bed-kube-api-access-4qrd6\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087403 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087443 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wbpq\" (UniqueName: \"kubernetes.io/projected/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-kube-api-access-5wbpq\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087462 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d256fa3-7889-415c-8e01-0b43802365a5-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087484 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/740c30a1-6a0c-479f-9f11-62c969da6044-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087506 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087526 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-audit-policies\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087548 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-encryption-config\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087568 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd5xr\" (UniqueName: \"kubernetes.io/projected/3de467a9-b50e-4af7-816d-c346960a39af-kube-api-access-pd5xr\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087590 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087613 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p252w\" (UniqueName: \"kubernetes.io/projected/5637ae84-f53a-48d5-87c6-1de13d92c181-kube-api-access-p252w\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087633 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/740c30a1-6a0c-479f-9f11-62c969da6044-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087653 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-serving-cert\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087672 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8btpx\" (UniqueName: \"kubernetes.io/projected/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-kube-api-access-8btpx\") pod 
\"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087695 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-etcd-client\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087736 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cc1509a-74f3-4f56-9742-d8c9e57359d4-serving-cert\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087782 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5637ae84-f53a-48d5-87c6-1de13d92c181-trusted-ca\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087804 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/92b8f00c-4b15-49a1-ac91-aef68b07cb74-metrics-tls\") pod \"dns-operator-744455d44c-8khhj\" (UID: \"92b8f00c-4b15-49a1-ac91-aef68b07cb74\") " pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087824 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-client-ca\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087845 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-audit-dir\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087865 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-service-ca-bundle\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087888 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq9p7\" (UniqueName: \"kubernetes.io/projected/4a057949-f846-40b0-bc8e-66c8c8d0d1d4-kube-api-access-vq9p7\") pod \"cluster-samples-operator-665b6dd947-jkrt8\" (UID: \"4a057949-f846-40b0-bc8e-66c8c8d0d1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087909 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plwrm\" (UniqueName: \"kubernetes.io/projected/92b8f00c-4b15-49a1-ac91-aef68b07cb74-kube-api-access-plwrm\") pod \"dns-operator-744455d44c-8khhj\" (UID: \"92b8f00c-4b15-49a1-ac91-aef68b07cb74\") " pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087940 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087961 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpdc2\" (UniqueName: \"kubernetes.io/projected/3cc1509a-74f3-4f56-9742-d8c9e57359d4-kube-api-access-vpdc2\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087986 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr2ml\" (UniqueName: \"kubernetes.io/projected/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-kube-api-access-zr2ml\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088010 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjwz9\" (UniqueName: \"kubernetes.io/projected/740c30a1-6a0c-479f-9f11-62c969da6044-kube-api-access-gjwz9\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088031 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-config\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088052 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5637ae84-f53a-48d5-87c6-1de13d92c181-serving-cert\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088071 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d256fa3-7889-415c-8e01-0b43802365a5-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088093 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/bc978c96-efc2-4963-af6b-bd987cb81bed-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088117 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088139 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088158 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc978c96-efc2-4963-af6b-bd987cb81bed-serving-cert\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088177 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-config\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088198 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a057949-f846-40b0-bc8e-66c8c8d0d1d4-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jkrt8\" (UID: \"4a057949-f846-40b0-bc8e-66c8c8d0d1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088218 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088240 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3de467a9-b50e-4af7-816d-c346960a39af-serving-cert\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088260 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5637ae84-f53a-48d5-87c6-1de13d92c181-config\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088283 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-config\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088302 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-images\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d256fa3-7889-415c-8e01-0b43802365a5-config\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.089171 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-audit-policies\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.090316 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc978c96-efc2-4963-af6b-bd987cb81bed-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.090498 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.091589 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-config\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.091613 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-audit-dir\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.087943 4869 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-br4ps"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.092356 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.092666 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.088791 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/740c30a1-6a0c-479f-9f11-62c969da6044-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.093233 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5637ae84-f53a-48d5-87c6-1de13d92c181-trusted-ca\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.094441 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-client-ca\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.094567 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5637ae84-f53a-48d5-87c6-1de13d92c181-config\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.108737 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.109531 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.110204 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a057949-f846-40b0-bc8e-66c8c8d0d1d4-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jkrt8\" (UID: 
\"4a057949-f846-40b0-bc8e-66c8c8d0d1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.112305 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3de467a9-b50e-4af7-816d-c346960a39af-serving-cert\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.112684 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-config\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.112819 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-service-ca-bundle\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.113882 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cc1509a-74f3-4f56-9742-d8c9e57359d4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.114277 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-images\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.114550 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-serving-cert\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.114777 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc978c96-efc2-4963-af6b-bd987cb81bed-serving-cert\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.114849 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.115042 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-config\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: 
\"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.115088 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.115731 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5637ae84-f53a-48d5-87c6-1de13d92c181-serving-cert\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.116050 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-etcd-client\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.116247 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cc1509a-74f3-4f56-9742-d8c9e57359d4-serving-cert\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.116353 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-encryption-config\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.117233 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.117809 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/92b8f00c-4b15-49a1-ac91-aef68b07cb74-metrics-tls\") pod \"dns-operator-744455d44c-8khhj\" (UID: \"92b8f00c-4b15-49a1-ac91-aef68b07cb74\") " pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.119521 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d256fa3-7889-415c-8e01-0b43802365a5-config\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.124510 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.126121 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.128447 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.132006 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.132128 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-vplc5"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.136278 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d256fa3-7889-415c-8e01-0b43802365a5-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.136299 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/740c30a1-6a0c-479f-9f11-62c969da6044-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.139680 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.139858 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.139947 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.141229 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.145289 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.145330 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.148567 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rkwmf"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.153718 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-k5cpz"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.153802 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-55vfj"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.156564 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g9lcg"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.157930 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.160189 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.161244 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tntbs"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.163955 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.164882 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-djqwl"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.166623 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.170766 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-djqwl"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.172299 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tntbs"] Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.183000 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.205223 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.222861 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.240554 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.260053 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.279853 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.299982 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.320071 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.339870 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.359413 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.379088 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.399456 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.421595 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.446025 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.459501 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.479940 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.499396 4869 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.520740 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.539209 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.559799 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.578358 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.599802 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.620287 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.639461 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.659236 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.678997 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.700257 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.720035 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.739385 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.760017 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.780330 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.801028 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.820430 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.840588 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.860270 4869 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.879434 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.900083 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.920264 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.939624 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.960024 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.978111 4869 request.go:700] Waited for 1.001793476s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmco-proxy-tls&limit=500&resourceVersion=0 Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.980555 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 30 10:56:29 crc kubenswrapper[4869]: I0130 10:56:29.999734 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.020506 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.039483 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.060416 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.079701 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.098833 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.141383 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.160536 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.180078 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.199788 4869 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202510 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-serving-cert\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202550 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-oauth-config\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202578 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-config\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202624 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2dhh\" (UniqueName: \"kubernetes.io/projected/118c5392-b197-4d57-b07f-66e6f537c4e1-kube-api-access-r2dhh\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202651 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-certificates\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202674 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-dir\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202807 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/118c5392-b197-4d57-b07f-66e6f537c4e1-auth-proxy-config\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202903 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-encryption-config\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: 
I0130 10:56:30.202955 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-trusted-ca-bundle\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.202980 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-oauth-serving-cert\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203008 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-audit-dir\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-client-ca\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203162 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203209 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203240 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-etcd-serving-ca\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203268 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b66a8fd2-73df-48dd-b697-95b2c50e01cd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203395 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjknl\" (UniqueName: \"kubernetes.io/projected/a945a8a2-155f-4e1d-a636-a04711e6e40c-kube-api-access-wjknl\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203419 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-image-import-ca\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203576 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwh2c\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-kube-api-access-lwh2c\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.203626 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:30.703608296 +0000 UTC m=+141.253484382 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203660 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203693 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203749 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pbmn\" (UniqueName: \"kubernetes.io/projected/e9ed3410-fb43-440e-8d7f-832850050d0c-kube-api-access-5pbmn\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203779 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203816 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-serving-cert\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203865 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b66a8fd2-73df-48dd-b697-95b2c50e01cd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203911 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " 
pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.203966 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204015 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204087 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-service-ca\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204152 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsp7p\" (UniqueName: \"kubernetes.io/projected/054a5a6b-1556-42a3-a4bb-1c25470226cc-kube-api-access-rsp7p\") pod \"downloads-7954f5f757-g46p9\" (UID: \"054a5a6b-1556-42a3-a4bb-1c25470226cc\") " pod="openshift-console/downloads-7954f5f757-g46p9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204176 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-trusted-ca\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204217 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msd2p\" (UniqueName: \"kubernetes.io/projected/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-kube-api-access-msd2p\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204252 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204291 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204408 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204506 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204546 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204608 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-bound-sa-token\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204684 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/118c5392-b197-4d57-b07f-66e6f537c4e1-machine-approver-tls\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204752 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-config\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204825 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-node-pullsecrets\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204871 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-config\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 
10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204893 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-tls\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204920 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-audit\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.204996 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-etcd-client\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.205060 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-policies\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.205093 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/118c5392-b197-4d57-b07f-66e6f537c4e1-config\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.205125 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzjpk\" (UniqueName: \"kubernetes.io/projected/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-kube-api-access-wzjpk\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.205154 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9ed3410-fb43-440e-8d7f-832850050d0c-serving-cert\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.220589 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.239619 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.268545 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 
10:56:30.280102 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.299541 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306263 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.306439 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:30.806399789 +0000 UTC m=+141.356275875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306521 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-metrics-tls\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306586 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjp5z\" (UniqueName: \"kubernetes.io/projected/72e2a345-8b37-45c7-a59b-3935151f1a40-kube-api-access-pjp5z\") pod \"ingress-canary-k5cpz\" (UID: \"72e2a345-8b37-45c7-a59b-3935151f1a40\") " pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306634 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-oauth-config\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306669 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-certificates\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306704 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-dir\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306766 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/118c5392-b197-4d57-b07f-66e6f537c4e1-auth-proxy-config\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306805 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-client-ca\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306867 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/73188a34-7fd8-4169-8b59-693f746607ed-metrics-tls\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306895 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-trusted-ca-bundle\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306895 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-dir\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306921 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-oauth-serving-cert\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.306954 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.307498 4869 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:30.807475952 +0000 UTC m=+141.357352018 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.308087 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-certificates\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.308468 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-trusted-ca-bundle\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.308541 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/118c5392-b197-4d57-b07f-66e6f537c4e1-auth-proxy-config\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.308592 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-oauth-serving-cert\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.308774 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-client-ca\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.308867 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfdwp\" (UniqueName: \"kubernetes.io/projected/37701f0a-dcf3-407e-9331-76e8c8cd871e-kube-api-access-gfdwp\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.308971 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cfkz\" (UniqueName: \"kubernetes.io/projected/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-kube-api-access-9cfkz\") pod \"csi-hostpathplugin-tntbs\" (UID: 
\"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.309911 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n572r\" (UniqueName: \"kubernetes.io/projected/1e366e0f-3cc0-4742-9edf-28e5257e9310-kube-api-access-n572r\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310035 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/df4da501-b52b-426b-9e7a-e3b62240c111-profile-collector-cert\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310371 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-ca\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310516 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqdbr\" (UniqueName: \"kubernetes.io/projected/73188a34-7fd8-4169-8b59-693f746607ed-kube-api-access-kqdbr\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310682 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjknl\" (UniqueName: \"kubernetes.io/projected/a945a8a2-155f-4e1d-a636-a04711e6e40c-kube-api-access-wjknl\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310775 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-image-import-ca\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310815 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b53b05e8-9e7c-48ad-9a9b-535ce038924e-tmpfs\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310909 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 
10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.310983 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311025 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7adee160-9300-4ae8-b89e-c9a939b4f354-auth-proxy-config\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311073 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pbmn\" (UniqueName: \"kubernetes.io/projected/e9ed3410-fb43-440e-8d7f-832850050d0c-kube-api-access-5pbmn\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgjgp\" (UniqueName: \"kubernetes.io/projected/5bba43c5-f7f3-4939-a9a7-f191927f7d64-kube-api-access-cgjgp\") pod \"multus-admission-controller-857f4d67dd-g9lcg\" (UID: \"5bba43c5-f7f3-4939-a9a7-f191927f7d64\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311149 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/567cbdfc-b521-456f-9255-7b2cc1d6d19f-srv-cert\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311191 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e366e0f-3cc0-4742-9edf-28e5257e9310-serving-cert\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311225 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311268 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311310 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-serving-cert\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311348 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.311388 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37701f0a-dcf3-407e-9331-76e8c8cd871e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.312310 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.312451 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.312525 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cxhv\" (UniqueName: \"kubernetes.io/projected/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-kube-api-access-5cxhv\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.312559 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmnft\" (UniqueName: \"kubernetes.io/projected/7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee-kube-api-access-rmnft\") pod \"package-server-manager-789f6589d5-f2zrw\" (UID: \"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.313057 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 
10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.313214 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-image-import-ca\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.313967 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314062 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-service-ca\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314248 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsp7p\" (UniqueName: \"kubernetes.io/projected/054a5a6b-1556-42a3-a4bb-1c25470226cc-kube-api-access-rsp7p\") pod \"downloads-7954f5f757-g46p9\" (UID: \"054a5a6b-1556-42a3-a4bb-1c25470226cc\") " pod="openshift-console/downloads-7954f5f757-g46p9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314330 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5bba43c5-f7f3-4939-a9a7-f191927f7d64-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g9lcg\" (UID: \"5bba43c5-f7f3-4939-a9a7-f191927f7d64\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314394 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fm6k\" (UniqueName: \"kubernetes.io/projected/77e5e845-c1c8-4915-9b8d-d716558c2528-kube-api-access-2fm6k\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314466 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-node-bootstrap-token\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314586 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/624037a4-840b-4c6d-806b-6b0d2276328d-secret-volume\") pod \"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314693 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-service-ca\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314727 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-trusted-ca\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314784 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msd2p\" (UniqueName: \"kubernetes.io/projected/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-kube-api-access-msd2p\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314823 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-signing-key\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314860 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzqgl\" (UniqueName: \"kubernetes.io/projected/567cbdfc-b521-456f-9255-7b2cc1d6d19f-kube-api-access-rzqgl\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314897 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-service-ca\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314942 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.314985 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37701f0a-dcf3-407e-9331-76e8c8cd871e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315050 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njdcv\" (UniqueName: \"kubernetes.io/projected/7adee160-9300-4ae8-b89e-c9a939b4f354-kube-api-access-njdcv\") pod 
\"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315103 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315162 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-config\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315203 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-plugins-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315242 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-csi-data-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315283 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-node-pullsecrets\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315336 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-tls\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315375 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-config\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315415 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b53b05e8-9e7c-48ad-9a9b-535ce038924e-webhook-cert\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315448 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-mountpoint-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-node-pullsecrets\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315484 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ftd7\" (UniqueName: \"kubernetes.io/projected/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-kube-api-access-5ftd7\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315570 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/083c9bb4-c4ed-4217-bff5-3babe25ac772-service-ca-bundle\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315633 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfqt8\" (UniqueName: \"kubernetes.io/projected/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-kube-api-access-qfqt8\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315670 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxwfc\" (UniqueName: \"kubernetes.io/projected/ca984745-756b-429d-83d9-c7699271eaa1-kube-api-access-rxwfc\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315742 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-trusted-ca\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315771 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/118c5392-b197-4d57-b07f-66e6f537c4e1-config\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzjpk\" (UniqueName: 
\"kubernetes.io/projected/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-kube-api-access-wzjpk\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.315929 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7adee160-9300-4ae8-b89e-c9a939b4f354-images\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316033 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-f2zrw\" (UID: \"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316097 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316096 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316117 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7adee160-9300-4ae8-b89e-c9a939b4f354-proxy-tls\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316174 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316190 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73188a34-7fd8-4169-8b59-693f746607ed-bound-sa-token\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316318 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-serving-cert\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316401 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316468 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-config\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316500 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2dhh\" (UniqueName: \"kubernetes.io/projected/118c5392-b197-4d57-b07f-66e6f537c4e1-kube-api-access-r2dhh\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316525 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/118c5392-b197-4d57-b07f-66e6f537c4e1-config\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316529 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8a9ab7f0-7df0-452e-a879-3a7344a1778f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xr2sj\" (UID: \"8a9ab7f0-7df0-452e-a879-3a7344a1778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316553 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316812 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-config\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316944 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/624037a4-840b-4c6d-806b-6b0d2276328d-config-volume\") pod 
\"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.316992 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317018 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-encryption-config\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317039 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-audit-dir\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317059 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-config-volume\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317101 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72e2a345-8b37-45c7-a59b-3935151f1a40-cert\") pod \"ingress-canary-k5cpz\" (UID: \"72e2a345-8b37-45c7-a59b-3935151f1a40\") " pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317120 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5ffm\" (UniqueName: \"kubernetes.io/projected/083c9bb4-c4ed-4217-bff5-3babe25ac772-kube-api-access-q5ffm\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317141 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317161 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-etcd-serving-ca\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317182 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317222 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b66a8fd2-73df-48dd-b697-95b2c50e01cd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317245 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-config\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317327 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-config\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317490 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-client\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317532 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e5e845-c1c8-4915-9b8d-d716558c2528-serving-cert\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317559 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d807cf30-7238-4b0b-9363-3380cee63802-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317579 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d807cf30-7238-4b0b-9363-3380cee63802-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317585 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/b66a8fd2-73df-48dd-b697-95b2c50e01cd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317597 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/df4da501-b52b-426b-9e7a-e3b62240c111-srv-cert\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317629 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-audit-dir\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317634 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8xr4\" (UniqueName: \"kubernetes.io/projected/df4da501-b52b-426b-9e7a-e3b62240c111-kube-api-access-d8xr4\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317674 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwh2c\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-kube-api-access-lwh2c\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317730 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b53b05e8-9e7c-48ad-9a9b-535ce038924e-apiservice-cert\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317815 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-metrics-certs\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317927 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbjzc\" (UniqueName: \"kubernetes.io/projected/c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa-kube-api-access-zbjzc\") pod \"migrator-59844c95c7-mx9wm\" (UID: \"c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317959 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9q7t\" (UniqueName: \"kubernetes.io/projected/b53b05e8-9e7c-48ad-9a9b-535ce038924e-kube-api-access-x9q7t\") pod \"packageserver-d55dfcdfc-2frn4\" 
(UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317975 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-serving-cert\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.317991 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-certs\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318028 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-socket-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318072 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e366e0f-3cc0-4742-9edf-28e5257e9310-config\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318123 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318147 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b66a8fd2-73df-48dd-b697-95b2c50e01cd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318202 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzcbx\" (UniqueName: \"kubernetes.io/projected/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-kube-api-access-dzcbx\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318428 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318583 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318664 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-default-certificate\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318743 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/73188a34-7fd8-4169-8b59-693f746607ed-trusted-ca\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318787 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318823 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n795r\" (UniqueName: \"kubernetes.io/projected/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-kube-api-access-n795r\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318907 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-bound-sa-token\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.318967 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.319000 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 
10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.319604 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-oauth-config\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320378 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320419 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/118c5392-b197-4d57-b07f-66e6f537c4e1-machine-approver-tls\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320455 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8hl9\" (UniqueName: \"kubernetes.io/projected/8a9ab7f0-7df0-452e-a879-3a7344a1778f-kube-api-access-k8hl9\") pod \"control-plane-machine-set-operator-78cbb6b69f-xr2sj\" (UID: \"8a9ab7f0-7df0-452e-a879-3a7344a1778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320479 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-config\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320477 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-etcd-serving-ca\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320600 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-tls\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320635 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d807cf30-7238-4b0b-9363-3380cee63802-config\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320696 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-nsc5j\" (UniqueName: \"kubernetes.io/projected/624037a4-840b-4c6d-806b-6b0d2276328d-kube-api-access-nsc5j\") pod \"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.320748 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-stats-auth\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321048 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-audit\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321121 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-etcd-client\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b66a8fd2-73df-48dd-b697-95b2c50e01cd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321195 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-proxy-tls\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321257 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca984745-756b-429d-83d9-c7699271eaa1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321333 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/567cbdfc-b521-456f-9255-7b2cc1d6d19f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321347 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321396 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-signing-cabundle\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321456 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-registration-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321550 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca984745-756b-429d-83d9-c7699271eaa1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321590 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-serving-cert\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321633 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-policies\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321699 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-audit\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.321695 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9ed3410-fb43-440e-8d7f-832850050d0c-serving-cert\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.322256 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-policies\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.322601 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-encryption-config\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.323182 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.323752 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.325001 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9ed3410-fb43-440e-8d7f-832850050d0c-serving-cert\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.325563 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.325733 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.326065 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/118c5392-b197-4d57-b07f-66e6f537c4e1-machine-approver-tls\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.327835 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-etcd-client\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.341074 4869 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.359673 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.380387 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.400792 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.420532 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.424842 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.425078 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:30.9250436 +0000 UTC m=+141.474919676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425184 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7adee160-9300-4ae8-b89e-c9a939b4f354-auth-proxy-config\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425238 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgjgp\" (UniqueName: \"kubernetes.io/projected/5bba43c5-f7f3-4939-a9a7-f191927f7d64-kube-api-access-cgjgp\") pod \"multus-admission-controller-857f4d67dd-g9lcg\" (UID: \"5bba43c5-f7f3-4939-a9a7-f191927f7d64\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425259 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/567cbdfc-b521-456f-9255-7b2cc1d6d19f-srv-cert\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425282 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1e366e0f-3cc0-4742-9edf-28e5257e9310-serving-cert\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425301 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37701f0a-dcf3-407e-9331-76e8c8cd871e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425347 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cxhv\" (UniqueName: \"kubernetes.io/projected/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-kube-api-access-5cxhv\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425375 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmnft\" (UniqueName: \"kubernetes.io/projected/7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee-kube-api-access-rmnft\") pod \"package-server-manager-789f6589d5-f2zrw\" (UID: \"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425420 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5bba43c5-f7f3-4939-a9a7-f191927f7d64-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g9lcg\" (UID: \"5bba43c5-f7f3-4939-a9a7-f191927f7d64\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425442 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fm6k\" (UniqueName: \"kubernetes.io/projected/77e5e845-c1c8-4915-9b8d-d716558c2528-kube-api-access-2fm6k\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425462 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-node-bootstrap-token\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425480 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/624037a4-840b-4c6d-806b-6b0d2276328d-secret-volume\") 
pod \"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425499 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-service-ca\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425528 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-signing-key\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425547 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzqgl\" (UniqueName: \"kubernetes.io/projected/567cbdfc-b521-456f-9255-7b2cc1d6d19f-kube-api-access-rzqgl\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425588 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425611 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37701f0a-dcf3-407e-9331-76e8c8cd871e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425632 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njdcv\" (UniqueName: \"kubernetes.io/projected/7adee160-9300-4ae8-b89e-c9a939b4f354-kube-api-access-njdcv\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425661 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-plugins-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425676 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-csi-data-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: 
I0130 10:56:30.425704 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b53b05e8-9e7c-48ad-9a9b-535ce038924e-webhook-cert\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425737 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-mountpoint-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425753 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfqt8\" (UniqueName: \"kubernetes.io/projected/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-kube-api-access-qfqt8\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425774 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxwfc\" (UniqueName: \"kubernetes.io/projected/ca984745-756b-429d-83d9-c7699271eaa1-kube-api-access-rxwfc\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425797 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ftd7\" (UniqueName: \"kubernetes.io/projected/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-kube-api-access-5ftd7\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425814 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/083c9bb4-c4ed-4217-bff5-3babe25ac772-service-ca-bundle\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425839 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7adee160-9300-4ae8-b89e-c9a939b4f354-images\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425858 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-f2zrw\" (UID: \"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425878 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/7adee160-9300-4ae8-b89e-c9a939b4f354-proxy-tls\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425895 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73188a34-7fd8-4169-8b59-693f746607ed-bound-sa-token\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425920 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8a9ab7f0-7df0-452e-a879-3a7344a1778f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xr2sj\" (UID: \"8a9ab7f0-7df0-452e-a879-3a7344a1778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425940 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425956 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/624037a4-840b-4c6d-806b-6b0d2276328d-config-volume\") pod \"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425975 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.425997 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-config-volume\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426018 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72e2a345-8b37-45c7-a59b-3935151f1a40-cert\") pod \"ingress-canary-k5cpz\" (UID: \"72e2a345-8b37-45c7-a59b-3935151f1a40\") " pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426035 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5ffm\" (UniqueName: \"kubernetes.io/projected/083c9bb4-c4ed-4217-bff5-3babe25ac772-kube-api-access-q5ffm\") pod \"router-default-5444994796-dvjw7\" (UID: 
\"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426067 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-client\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426083 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e5e845-c1c8-4915-9b8d-d716558c2528-serving-cert\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426099 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d807cf30-7238-4b0b-9363-3380cee63802-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426116 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d807cf30-7238-4b0b-9363-3380cee63802-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426135 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/df4da501-b52b-426b-9e7a-e3b62240c111-srv-cert\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426153 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8xr4\" (UniqueName: \"kubernetes.io/projected/df4da501-b52b-426b-9e7a-e3b62240c111-kube-api-access-d8xr4\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426176 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7adee160-9300-4ae8-b89e-c9a939b4f354-auth-proxy-config\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426184 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b53b05e8-9e7c-48ad-9a9b-535ce038924e-apiservice-cert\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc 
kubenswrapper[4869]: I0130 10:56:30.426252 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-metrics-certs\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426282 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9q7t\" (UniqueName: \"kubernetes.io/projected/b53b05e8-9e7c-48ad-9a9b-535ce038924e-kube-api-access-x9q7t\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426302 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbjzc\" (UniqueName: \"kubernetes.io/projected/c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa-kube-api-access-zbjzc\") pod \"migrator-59844c95c7-mx9wm\" (UID: \"c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426329 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-certs\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426356 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-socket-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426375 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e366e0f-3cc0-4742-9edf-28e5257e9310-config\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426412 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426442 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzcbx\" (UniqueName: \"kubernetes.io/projected/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-kube-api-access-dzcbx\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426488 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n795r\" (UniqueName: 
\"kubernetes.io/projected/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-kube-api-access-n795r\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426510 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-default-certificate\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426534 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/73188a34-7fd8-4169-8b59-693f746607ed-trusted-ca\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426581 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-config\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426663 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8hl9\" (UniqueName: \"kubernetes.io/projected/8a9ab7f0-7df0-452e-a879-3a7344a1778f-kube-api-access-k8hl9\") pod \"control-plane-machine-set-operator-78cbb6b69f-xr2sj\" (UID: \"8a9ab7f0-7df0-452e-a879-3a7344a1778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426695 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d807cf30-7238-4b0b-9363-3380cee63802-config\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426763 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsc5j\" (UniqueName: \"kubernetes.io/projected/624037a4-840b-4c6d-806b-6b0d2276328d-kube-api-access-nsc5j\") pod \"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426795 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-stats-auth\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426823 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca984745-756b-429d-83d9-c7699271eaa1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426856 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-proxy-tls\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426880 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/567cbdfc-b521-456f-9255-7b2cc1d6d19f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426907 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca984745-756b-429d-83d9-c7699271eaa1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426936 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-signing-cabundle\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.426970 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-registration-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427036 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-metrics-tls\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427063 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjp5z\" (UniqueName: \"kubernetes.io/projected/72e2a345-8b37-45c7-a59b-3935151f1a40-kube-api-access-pjp5z\") pod \"ingress-canary-k5cpz\" (UID: \"72e2a345-8b37-45c7-a59b-3935151f1a40\") " pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427098 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427126 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/73188a34-7fd8-4169-8b59-693f746607ed-metrics-tls\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427161 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427188 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfdwp\" (UniqueName: \"kubernetes.io/projected/37701f0a-dcf3-407e-9331-76e8c8cd871e-kube-api-access-gfdwp\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427219 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cfkz\" (UniqueName: \"kubernetes.io/projected/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-kube-api-access-9cfkz\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427254 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n572r\" (UniqueName: \"kubernetes.io/projected/1e366e0f-3cc0-4742-9edf-28e5257e9310-kube-api-access-n572r\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427280 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/df4da501-b52b-426b-9e7a-e3b62240c111-profile-collector-cert\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427306 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-ca\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427344 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqdbr\" (UniqueName: \"kubernetes.io/projected/73188a34-7fd8-4169-8b59-693f746607ed-kube-api-access-kqdbr\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/b53b05e8-9e7c-48ad-9a9b-535ce038924e-tmpfs\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427596 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7adee160-9300-4ae8-b89e-c9a939b4f354-images\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427740 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/083c9bb4-c4ed-4217-bff5-3babe25ac772-service-ca-bundle\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.427954 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b53b05e8-9e7c-48ad-9a9b-535ce038924e-tmpfs\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.429021 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.430528 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-socket-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.430777 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.431310 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37701f0a-dcf3-407e-9331-76e8c8cd871e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.431421 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-service-ca\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 
10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.431669 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-config\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.431888 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8a9ab7f0-7df0-452e-a879-3a7344a1778f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xr2sj\" (UID: \"8a9ab7f0-7df0-452e-a879-3a7344a1778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.432119 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e5e845-c1c8-4915-9b8d-d716558c2528-serving-cert\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.432631 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d807cf30-7238-4b0b-9363-3380cee63802-config\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.432699 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/73188a34-7fd8-4169-8b59-693f746607ed-trusted-ca\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.432737 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.432835 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37701f0a-dcf3-407e-9331-76e8c8cd871e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.433282 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-mountpoint-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.433354 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-stats-auth\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.433400 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-default-certificate\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.433438 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-registration-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.433974 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:30.933946694 +0000 UTC m=+141.483822970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.433994 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-ca\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.434108 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: \"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.434315 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d807cf30-7238-4b0b-9363-3380cee63802-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.434390 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-plugins-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.434589 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e366e0f-3cc0-4742-9edf-28e5257e9310-serving-cert\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.434817 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-csi-data-dir\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.435611 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca984745-756b-429d-83d9-c7699271eaa1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.436416 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5bba43c5-f7f3-4939-a9a7-f191927f7d64-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g9lcg\" (UID: \"5bba43c5-f7f3-4939-a9a7-f191927f7d64\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.436611 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b53b05e8-9e7c-48ad-9a9b-535ce038924e-webhook-cert\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.437019 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-proxy-tls\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.437118 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/df4da501-b52b-426b-9e7a-e3b62240c111-srv-cert\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.437301 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/567cbdfc-b521-456f-9255-7b2cc1d6d19f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.437311 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/77e5e845-c1c8-4915-9b8d-d716558c2528-etcd-client\") pod \"etcd-operator-b45778765-55vfj\" (UID: 
\"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.437901 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7adee160-9300-4ae8-b89e-c9a939b4f354-proxy-tls\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.438307 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/083c9bb4-c4ed-4217-bff5-3babe25ac772-metrics-certs\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.438873 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.439239 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.439518 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b53b05e8-9e7c-48ad-9a9b-535ce038924e-apiservice-cert\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.439770 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca984745-756b-429d-83d9-c7699271eaa1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.440583 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/df4da501-b52b-426b-9e7a-e3b62240c111-profile-collector-cert\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.443158 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e366e0f-3cc0-4742-9edf-28e5257e9310-config\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.446575 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/624037a4-840b-4c6d-806b-6b0d2276328d-secret-volume\") pod \"collect-profiles-29496165-hwsc7\" (UID: 
\"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.449076 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/73188a34-7fd8-4169-8b59-693f746607ed-metrics-tls\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.459880 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.480741 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.492231 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/567cbdfc-b521-456f-9255-7b2cc1d6d19f-srv-cert\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.500874 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.511293 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-f2zrw\" (UID: \"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.518925 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.528740 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.528929 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.028876645 +0000 UTC m=+141.578752721 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.529174 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.529779 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.029752112 +0000 UTC m=+141.579628178 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.540206 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.559091 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.568236 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-signing-key\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.579027 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.585118 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-signing-cabundle\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.600048 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.620383 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.630913 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.631086 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.131054859 +0000 UTC m=+141.680930965 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.631159 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/624037a4-840b-4c6d-806b-6b0d2276328d-config-volume\") pod \"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.631832 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.632308 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.132296097 +0000 UTC m=+141.682172173 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.640367 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.660725 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.679730 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.699944 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.715268 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72e2a345-8b37-45c7-a59b-3935151f1a40-cert\") pod \"ingress-canary-k5cpz\" (UID: \"72e2a345-8b37-45c7-a59b-3935151f1a40\") " pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.719828 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.733146 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.733332 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.233301995 +0000 UTC m=+141.783178081 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.733676 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.734358 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.234341207 +0000 UTC m=+141.784217293 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.767583 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qrd6\" (UniqueName: \"kubernetes.io/projected/bc978c96-efc2-4963-af6b-bd987cb81bed-kube-api-access-4qrd6\") pod \"openshift-config-operator-7777fb866f-vxcwv\" (UID: \"bc978c96-efc2-4963-af6b-bd987cb81bed\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.776862 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8btpx\" (UniqueName: \"kubernetes.io/projected/1f81e19a-bac5-4dd5-a294-1a026d0b9cd6-kube-api-access-8btpx\") pod \"apiserver-7bbb656c7d-z5mpb\" (UID: \"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.798517 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq9p7\" (UniqueName: \"kubernetes.io/projected/4a057949-f846-40b0-bc8e-66c8c8d0d1d4-kube-api-access-vq9p7\") pod \"cluster-samples-operator-665b6dd947-jkrt8\" (UID: \"4a057949-f846-40b0-bc8e-66c8c8d0d1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.808232 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.825425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpdc2\" (UniqueName: \"kubernetes.io/projected/3cc1509a-74f3-4f56-9742-d8c9e57359d4-kube-api-access-vpdc2\") pod \"authentication-operator-69f744f599-tt6p5\" (UID: \"3cc1509a-74f3-4f56-9742-d8c9e57359d4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.835201 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.835345 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.335311994 +0000 UTC m=+141.885188080 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.835777 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.836347 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.336327266 +0000 UTC m=+141.886203352 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.841163 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd5xr\" (UniqueName: \"kubernetes.io/projected/3de467a9-b50e-4af7-816d-c346960a39af-kube-api-access-pd5xr\") pod \"route-controller-manager-6576b87f9c-fqgwq\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.859394 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plwrm\" (UniqueName: \"kubernetes.io/projected/92b8f00c-4b15-49a1-ac91-aef68b07cb74-kube-api-access-plwrm\") pod \"dns-operator-744455d44c-8khhj\" (UID: \"92b8f00c-4b15-49a1-ac91-aef68b07cb74\") " pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.876990 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d256fa3-7889-415c-8e01-0b43802365a5-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qgkdr\" (UID: \"7d256fa3-7889-415c-8e01-0b43802365a5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.880620 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.899278 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wbpq\" (UniqueName: \"kubernetes.io/projected/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-kube-api-access-5wbpq\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.918436 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr2ml\" (UniqueName: \"kubernetes.io/projected/04600a3e-ea6a-4828-bf49-4f97a92f2f4d-kube-api-access-zr2ml\") pod \"machine-api-operator-5694c8668f-cdspc\" (UID: \"04600a3e-ea6a-4828-bf49-4f97a92f2f4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.936860 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.937176 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.937624 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.437608022 +0000 UTC m=+141.987484088 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.937684 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:30 crc kubenswrapper[4869]: E0130 10:56:30.938154 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.438146239 +0000 UTC m=+141.988022305 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.945991 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjwz9\" (UniqueName: \"kubernetes.io/projected/740c30a1-6a0c-479f-9f11-62c969da6044-kube-api-access-gjwz9\") pod \"openshift-apiserver-operator-796bbdcf4f-ctbj2\" (UID: \"740c30a1-6a0c-479f-9f11-62c969da6044\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.960122 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86d09af6-3f55-44c6-a7c4-feef5a08a1fb-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j8fvx\" (UID: \"86d09af6-3f55-44c6-a7c4-feef5a08a1fb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.979302 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.980199 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p252w\" (UniqueName: \"kubernetes.io/projected/5637ae84-f53a-48d5-87c6-1de13d92c181-kube-api-access-p252w\") pod \"console-operator-58897d9998-xdsnd\" (UID: \"5637ae84-f53a-48d5-87c6-1de13d92c181\") " pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.983888 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-certs\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.984237 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.996870 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" Jan 30 10:56:30 crc kubenswrapper[4869]: I0130 10:56:30.997709 4869 request.go:700] Waited for 1.857250449s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-dockercfg-qx5rd&limit=500&resourceVersion=0 Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.000824 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.012854 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.022079 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.028821 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.032891 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-node-bootstrap-token\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.038457 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.038979 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.538956961 +0000 UTC m=+142.088833027 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.039863 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.059961 4869 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.061191 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.076866 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.080480 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.097124 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.099164 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.099543 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.107804 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-config-volume\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.122192 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.138854 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.140450 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.140950 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.640935779 +0000 UTC m=+142.190811835 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.142109 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.148826 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-metrics-tls\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.155950 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.175764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjknl\" (UniqueName: \"kubernetes.io/projected/a945a8a2-155f-4e1d-a636-a04711e6e40c-kube-api-access-wjknl\") pod \"oauth-openshift-558db77b4-dqfj8\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.190181 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.196967 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pbmn\" (UniqueName: \"kubernetes.io/projected/e9ed3410-fb43-440e-8d7f-832850050d0c-kube-api-access-5pbmn\") pod \"controller-manager-879f6c89f-pr488\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.221478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsp7p\" (UniqueName: \"kubernetes.io/projected/054a5a6b-1556-42a3-a4bb-1c25470226cc-kube-api-access-rsp7p\") pod \"downloads-7954f5f757-g46p9\" (UID: \"054a5a6b-1556-42a3-a4bb-1c25470226cc\") " pod="openshift-console/downloads-7954f5f757-g46p9" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.237272 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msd2p\" (UniqueName: \"kubernetes.io/projected/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-kube-api-access-msd2p\") pod \"console-f9d7485db-z8qjp\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.243374 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.244030 4869 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.74401273 +0000 UTC m=+142.293888786 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.262305 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.262937 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzjpk\" (UniqueName: \"kubernetes.io/projected/af5a6cd3-3f75-431f-bea7-cd3197b8fa1b-kube-api-access-wzjpk\") pod \"apiserver-76f77b778f-tnth8\" (UID: \"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b\") " pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.277731 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.278189 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-g46p9" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.284772 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2dhh\" (UniqueName: \"kubernetes.io/projected/118c5392-b197-4d57-b07f-66e6f537c4e1-kube-api-access-r2dhh\") pod \"machine-approver-56656f9798-n5r98\" (UID: \"118c5392-b197-4d57-b07f-66e6f537c4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.299440 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tt6p5"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.302107 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwh2c\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-kube-api-access-lwh2c\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.320155 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-bound-sa-token\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.353359 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.354796 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.355696 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.855678846 +0000 UTC m=+142.405554912 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.377382 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxwfc\" (UniqueName: \"kubernetes.io/projected/ca984745-756b-429d-83d9-c7699271eaa1-kube-api-access-rxwfc\") pod \"kube-storage-version-migrator-operator-b67b599dd-8hs7b\" (UID: \"ca984745-756b-429d-83d9-c7699271eaa1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.399328 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ftd7\" (UniqueName: \"kubernetes.io/projected/b2ce252a-14bb-4b72-b4f8-6532d0f9266e-kube-api-access-5ftd7\") pod \"dns-default-djqwl\" (UID: \"b2ce252a-14bb-4b72-b4f8-6532d0f9266e\") " pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.401928 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8khhj"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.417214 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73188a34-7fd8-4169-8b59-693f746607ed-bound-sa-token\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.430952 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.436221 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgjgp\" (UniqueName: \"kubernetes.io/projected/5bba43c5-f7f3-4939-a9a7-f191927f7d64-kube-api-access-cgjgp\") pod \"multus-admission-controller-857f4d67dd-g9lcg\" (UID: \"5bba43c5-f7f3-4939-a9a7-f191927f7d64\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.442324 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzqgl\" (UniqueName: \"kubernetes.io/projected/567cbdfc-b521-456f-9255-7b2cc1d6d19f-kube-api-access-rzqgl\") pod \"olm-operator-6b444d44fb-cs2xw\" (UID: \"567cbdfc-b521-456f-9255-7b2cc1d6d19f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.442657 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.447872 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.455786 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.456618 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:31.956588301 +0000 UTC m=+142.506464377 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.467132 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.471002 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cxhv\" (UniqueName: \"kubernetes.io/projected/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-kube-api-access-5cxhv\") pod \"marketplace-operator-79b997595-rkwmf\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") " pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.482413 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmnft\" (UniqueName: \"kubernetes.io/projected/7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee-kube-api-access-rmnft\") pod \"package-server-manager-789f6589d5-f2zrw\" (UID: \"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.504383 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fm6k\" (UniqueName: \"kubernetes.io/projected/77e5e845-c1c8-4915-9b8d-d716558c2528-kube-api-access-2fm6k\") pod \"etcd-operator-b45778765-55vfj\" (UID: \"77e5e845-c1c8-4915-9b8d-d716558c2528\") " pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.512604 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.513925 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.515021 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xdsnd"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.517113 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.518292 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9q7t\" (UniqueName: \"kubernetes.io/projected/b53b05e8-9e7c-48ad-9a9b-535ce038924e-kube-api-access-x9q7t\") pod \"packageserver-d55dfcdfc-2frn4\" (UID: \"b53b05e8-9e7c-48ad-9a9b-535ce038924e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.537231 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbjzc\" (UniqueName: \"kubernetes.io/projected/c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa-kube-api-access-zbjzc\") pod \"migrator-59844c95c7-mx9wm\" (UID: \"c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.557389 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.557938 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.057921079 +0000 UTC m=+142.607797305 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.559263 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-cdspc"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.563401 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5ffm\" (UniqueName: \"kubernetes.io/projected/083c9bb4-c4ed-4217-bff5-3babe25ac772-kube-api-access-q5ffm\") pod \"router-default-5444994796-dvjw7\" (UID: \"083c9bb4-c4ed-4217-bff5-3babe25ac772\") " pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.580607 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8hl9\" (UniqueName: \"kubernetes.io/projected/8a9ab7f0-7df0-452e-a879-3a7344a1778f-kube-api-access-k8hl9\") pod \"control-plane-machine-set-operator-78cbb6b69f-xr2sj\" (UID: \"8a9ab7f0-7df0-452e-a879-3a7344a1778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.595308 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b79e0dd-2d4a-4983-9ed6-fca070b074d7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qfdm9\" (UID: 
\"4b79e0dd-2d4a-4983-9ed6-fca070b074d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.608574 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.614143 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsc5j\" (UniqueName: \"kubernetes.io/projected/624037a4-840b-4c6d-806b-6b0d2276328d-kube-api-access-nsc5j\") pod \"collect-profiles-29496165-hwsc7\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.620195 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.620467 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.642304 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d807cf30-7238-4b0b-9363-3380cee63802-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-ndht4\" (UID: \"d807cf30-7238-4b0b-9363-3380cee63802\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.644501 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-g46p9"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.644700 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.651241 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.656556 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfqt8\" (UniqueName: \"kubernetes.io/projected/780c0d74-7c0b-4fd0-9ed9-23fdca188bf6-kube-api-access-qfqt8\") pod \"machine-config-server-vplc5\" (UID: \"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6\") " pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.658902 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.659330 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.159312479 +0000 UTC m=+142.709188545 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.662312 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.675425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cfkz\" (UniqueName: \"kubernetes.io/projected/c6d4c3d5-598b-48a3-8a46-3d4997a4e67c-kube-api-access-9cfkz\") pod \"csi-hostpathplugin-tntbs\" (UID: \"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c\") " pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.675677 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.691647 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.700227 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.707391 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzcbx\" (UniqueName: \"kubernetes.io/projected/2bcc963a-ffa6-45fb-a7c3-c83b891b74e0-kube-api-access-dzcbx\") pod \"machine-config-controller-84d6567774-4z8pc\" (UID: \"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.708909 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.715618 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.716974 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjp5z\" (UniqueName: \"kubernetes.io/projected/72e2a345-8b37-45c7-a59b-3935151f1a40-kube-api-access-pjp5z\") pod \"ingress-canary-k5cpz\" (UID: \"72e2a345-8b37-45c7-a59b-3935151f1a40\") " pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.742902 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n795r\" (UniqueName: \"kubernetes.io/projected/2ca2af71-e5ad-4800-9d2c-d79a15a031ad-kube-api-access-n795r\") pod \"service-ca-9c57cc56f-br4ps\" (UID: \"2ca2af71-e5ad-4800-9d2c-d79a15a031ad\") " pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.750135 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.759700 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.766038 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.766542 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.266510208 +0000 UTC m=+142.816386274 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.766926 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.774274 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n572r\" (UniqueName: \"kubernetes.io/projected/1e366e0f-3cc0-4742-9edf-28e5257e9310-kube-api-access-n572r\") pod \"service-ca-operator-777779d784-rwgbc\" (UID: \"1e366e0f-3cc0-4742-9edf-28e5257e9310\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.775643 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tnth8"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.775877 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-k5cpz" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.784311 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfdwp\" (UniqueName: \"kubernetes.io/projected/37701f0a-dcf3-407e-9331-76e8c8cd871e-kube-api-access-gfdwp\") pod \"openshift-controller-manager-operator-756b6f6bc6-s22tp\" (UID: \"37701f0a-dcf3-407e-9331-76e8c8cd871e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.784619 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vplc5" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.802840 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.812488 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqdbr\" (UniqueName: \"kubernetes.io/projected/73188a34-7fd8-4169-8b59-693f746607ed-kube-api-access-kqdbr\") pod \"ingress-operator-5b745b69d9-fn57m\" (UID: \"73188a34-7fd8-4169-8b59-693f746607ed\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.816944 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njdcv\" (UniqueName: \"kubernetes.io/projected/7adee160-9300-4ae8-b89e-c9a939b4f354-kube-api-access-njdcv\") pod \"machine-config-operator-74547568cd-grv7k\" (UID: \"7adee160-9300-4ae8-b89e-c9a939b4f354\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.841450 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8xr4\" (UniqueName: \"kubernetes.io/projected/df4da501-b52b-426b-9e7a-e3b62240c111-kube-api-access-d8xr4\") pod \"catalog-operator-68c6474976-xxd67\" (UID: \"df4da501-b52b-426b-9e7a-e3b62240c111\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:31 crc kubenswrapper[4869]: W0130 10:56:31.859555 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf5a6cd3_3f75_431f_bea7_cd3197b8fa1b.slice/crio-7debd54c63ab0759cbd4a0a83fbdde0ef0c6f173e1f3fc57fc9a6c179b61c4bf WatchSource:0}: Error finding container 7debd54c63ab0759cbd4a0a83fbdde0ef0c6f173e1f3fc57fc9a6c179b61c4bf: Status 404 returned error can't find the container with id 7debd54c63ab0759cbd4a0a83fbdde0ef0c6f173e1f3fc57fc9a6c179b61c4bf Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.868193 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.868560 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.368541197 +0000 UTC m=+142.918417263 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.875852 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw"] Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.928180 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.935601 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.955359 4869 generic.go:334] "Generic (PLEG): container finished" podID="bc978c96-efc2-4963-af6b-bd987cb81bed" containerID="c31e0c4fa6985368bacb9a060e4a023bd33b346cc283c3749b45400de8510bd3" exitCode=0 Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.955856 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" event={"ID":"bc978c96-efc2-4963-af6b-bd987cb81bed","Type":"ContainerDied","Data":"c31e0c4fa6985368bacb9a060e4a023bd33b346cc283c3749b45400de8510bd3"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.955882 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" event={"ID":"bc978c96-efc2-4963-af6b-bd987cb81bed","Type":"ContainerStarted","Data":"e0d478bebe1b7a4c49877703b3b58f3f0940fbe1e79655188e5e3468006bfa86"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.958783 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" event={"ID":"92b8f00c-4b15-49a1-ac91-aef68b07cb74","Type":"ContainerStarted","Data":"3673d7d066f08f15a3ef21275ff356ad4b38747e177a1afce6284803db570e2c"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.959940 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" event={"ID":"740c30a1-6a0c-479f-9f11-62c969da6044","Type":"ContainerStarted","Data":"dee60da765a6a69087824d3caf08e6043c5ca208eb1f8e42cef3c10ea997c2bd"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.962296 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" event={"ID":"5637ae84-f53a-48d5-87c6-1de13d92c181","Type":"ContainerStarted","Data":"e9feb23446e08772ced6a01c1d52213262359974b49af024148d0f073e0e40d5"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.964131 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" event={"ID":"4a057949-f846-40b0-bc8e-66c8c8d0d1d4","Type":"ContainerStarted","Data":"49b3c2bcc97157eaf404f025510cbe142a7e1c34821cacca5e0da5e264c89f23"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.969006 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.969479 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:31 crc kubenswrapper[4869]: E0130 10:56:31.969923 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.469907656 +0000 UTC m=+143.019783722 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.971242 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" event={"ID":"86d09af6-3f55-44c6-a7c4-feef5a08a1fb","Type":"ContainerStarted","Data":"a06f0e49b60aff3fcf7ce52b649766a39e85dbaf118d3541c739d3bc57b6ba2b"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.976698 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" event={"ID":"7d256fa3-7889-415c-8e01-0b43802365a5","Type":"ContainerStarted","Data":"bfa81733a2ad5b65c5248eec48bc3a3a27c7841a79260fe47155dc1c5823c00a"} Jan 30 10:56:31 crc kubenswrapper[4869]: I0130 10:56:31.983272 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.012412 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" event={"ID":"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6","Type":"ContainerStarted","Data":"49e24673571312dcc7aaa9c9110509f829aafecfbc6d468c249db4e58e9136ae"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.021626 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" event={"ID":"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b","Type":"ContainerStarted","Data":"7debd54c63ab0759cbd4a0a83fbdde0ef0c6f173e1f3fc57fc9a6c179b61c4bf"} Jan 30 10:56:32 crc kubenswrapper[4869]: W0130 10:56:32.024227 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod567cbdfc_b521_456f_9255_7b2cc1d6d19f.slice/crio-a440182e201f7cb0f2d841e867266064909e48a71d9854cbd3fe6e87f35bccaf WatchSource:0}: Error finding container a440182e201f7cb0f2d841e867266064909e48a71d9854cbd3fe6e87f35bccaf: Status 404 returned error can't find the container with id a440182e201f7cb0f2d841e867266064909e48a71d9854cbd3fe6e87f35bccaf Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.025754 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" event={"ID":"118c5392-b197-4d57-b07f-66e6f537c4e1","Type":"ContainerStarted","Data":"fe75441b2b46a629732ac32e7bd3edc3c1a38a18788b61d056a46f97264b534a"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.027533 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.034771 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.052146 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" event={"ID":"3cc1509a-74f3-4f56-9742-d8c9e57359d4","Type":"ContainerStarted","Data":"6796883d6f886401b5c7b2f652d2b934a5a71aa91a8026a98904e2271d339e2a"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.052269 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" event={"ID":"3cc1509a-74f3-4f56-9742-d8c9e57359d4","Type":"ContainerStarted","Data":"ae03e59e13c3364a0cd37e381c83c05de1353d28edd4d1ce1b42f279296a86ed"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.054464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" event={"ID":"3de467a9-b50e-4af7-816d-c346960a39af","Type":"ContainerStarted","Data":"e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.054500 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" event={"ID":"3de467a9-b50e-4af7-816d-c346960a39af","Type":"ContainerStarted","Data":"6732c8b176d0093e8ae8e04ac73613e947096ece699e2e95f9ecfd2c913cb614"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.060954 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" event={"ID":"04600a3e-ea6a-4828-bf49-4f97a92f2f4d","Type":"ContainerStarted","Data":"4477b4d1279554865a90309981101cb257135e34bc9565604d90408db32556aa"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.064777 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-g46p9" event={"ID":"054a5a6b-1556-42a3-a4bb-1c25470226cc","Type":"ContainerStarted","Data":"60013a6f779584927ce8cdcfc8801704c7c90e5aa9c9c2281acab23bb471467a"} Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.071109 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.073064 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.57304237 +0000 UTC m=+143.122918436 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.085774 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dqfj8"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.173271 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.174248 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.674233374 +0000 UTC m=+143.224109440 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.196162 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.215988 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pr488"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.248581 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-djqwl"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.275808 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.275939 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.775908922 +0000 UTC m=+143.325784988 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.276563 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.277101 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.777080788 +0000 UTC m=+143.326956864 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.311113 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rkwmf"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.383119 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.383884 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.883851364 +0000 UTC m=+143.433727440 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.388032 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z8qjp"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.485068 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.486023 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:32.986007007 +0000 UTC m=+143.535883073 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: W0130 10:56:32.569087 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2ce252a_14bb_4b72_b4f8_6532d0f9266e.slice/crio-922e70ba24ac38e77fdfb259da19d8bed0ac20f32c6e3886684685d261d3f7f4 WatchSource:0}: Error finding container 922e70ba24ac38e77fdfb259da19d8bed0ac20f32c6e3886684685d261d3f7f4: Status 404 returned error can't find the container with id 922e70ba24ac38e77fdfb259da19d8bed0ac20f32c6e3886684685d261d3f7f4 Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.586355 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.586753 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.086736566 +0000 UTC m=+143.636612622 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.689558 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.690016 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.189994543 +0000 UTC m=+143.739870769 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.791201 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.791419 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.291404123 +0000 UTC m=+143.841280189 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.791481 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.791807 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.291801106 +0000 UTC m=+143.841677172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.892685 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.893276 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.393261538 +0000 UTC m=+143.943137594 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.958239 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.981506 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b"] Jan 30 10:56:32 crc kubenswrapper[4869]: I0130 10:56:32.996417 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:32 crc kubenswrapper[4869]: E0130 10:56:32.996864 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.496848865 +0000 UTC m=+144.046724931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.091203 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z8qjp" event={"ID":"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2","Type":"ContainerStarted","Data":"9a321dc364d7adc27a6a137eb2ad8d1f26e64e1e7cf8da8b45869e31ffe8835d"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.093206 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" event={"ID":"a945a8a2-155f-4e1d-a636-a04711e6e40c","Type":"ContainerStarted","Data":"4dd9d0090a93f6ab2cae1daebdfe5e21502ae79974f478fc9bad2508ce2fa5a2"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.098543 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.098679 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-30 10:56:33.598655478 +0000 UTC m=+144.148531544 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.098792 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.099092 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.599083941 +0000 UTC m=+144.148960007 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.100931 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" event={"ID":"86d09af6-3f55-44c6-a7c4-feef5a08a1fb","Type":"ContainerStarted","Data":"6485a9ab77e71d0093f1dd34260087c5ff087b33c2b2fbd55cf3f6bd8b3637aa"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.104035 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-dvjw7" event={"ID":"083c9bb4-c4ed-4217-bff5-3babe25ac772","Type":"ContainerStarted","Data":"fc04c35f5f617112f282d2608fe3212d00331342264442b6c93d9ed0a0a7d6d4"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.106981 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" event={"ID":"567cbdfc-b521-456f-9255-7b2cc1d6d19f","Type":"ContainerStarted","Data":"222f22ffeabd53a8596a9a9f1699c0074bb3b45e05df3d1ed37ed81aece5a048"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.107035 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" event={"ID":"567cbdfc-b521-456f-9255-7b2cc1d6d19f","Type":"ContainerStarted","Data":"a440182e201f7cb0f2d841e867266064909e48a71d9854cbd3fe6e87f35bccaf"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.107375 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.114617 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" event={"ID":"92b8f00c-4b15-49a1-ac91-aef68b07cb74","Type":"ContainerStarted","Data":"858a890c61d3b3c1b299bba9122a6181c523aaaf03cb4cc9fa0e9be988803023"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.116206 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" event={"ID":"740c30a1-6a0c-479f-9f11-62c969da6044","Type":"ContainerStarted","Data":"65c75468b0f74f322a6cc85256d0c09ee7f35ecf1bacb18965135054610e38ed"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.116949 4869 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-cs2xw container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.116997 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" podUID="567cbdfc-b521-456f-9255-7b2cc1d6d19f" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.118813 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-g46p9" event={"ID":"054a5a6b-1556-42a3-a4bb-1c25470226cc","Type":"ContainerStarted","Data":"41707ced091257fbd97b14b87772fc8f563c29388c9dc97c1eb45a2522f442a8"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.119454 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-g46p9" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.124109 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" event={"ID":"118c5392-b197-4d57-b07f-66e6f537c4e1","Type":"ContainerStarted","Data":"ef4190b154465d26267c4601628e5fa394a20e356d93dc036c6ba9ef0406e3d8"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.125768 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" event={"ID":"5637ae84-f53a-48d5-87c6-1de13d92c181","Type":"ContainerStarted","Data":"7cd603826522d1c822cb02b5946d2f517d32b2e0cf5b665551f242bcb5faca02"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.125846 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-g46p9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.125896 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g46p9" podUID="054a5a6b-1556-42a3-a4bb-1c25470226cc" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.125972 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.128180 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" event={"ID":"44c5913f-a9a0-4b9f-aa60-c6158d19a38a","Type":"ContainerStarted","Data":"23039b801e47c9fdf53e8d154aa8558897595887edf58d3d344f883707b60a00"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.130129 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" event={"ID":"e9ed3410-fb43-440e-8d7f-832850050d0c","Type":"ContainerStarted","Data":"9ebfcef35d521001a4d6519fe42b8d659210bfe75a69e34bed7b215d9b87a094"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.130800 4869 patch_prober.go:28] interesting pod/console-operator-58897d9998-xdsnd container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/readyz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.130865 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" podUID="5637ae84-f53a-48d5-87c6-1de13d92c181" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/readyz\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.131455 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" event={"ID":"04600a3e-ea6a-4828-bf49-4f97a92f2f4d","Type":"ContainerStarted","Data":"961447358bc933c9fed4800a11567d604dced3c0bb01e0b2fea3454714d830b0"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.137727 4869 generic.go:334] "Generic (PLEG): container finished" podID="1f81e19a-bac5-4dd5-a294-1a026d0b9cd6" containerID="d2ad60f89c3b7d9e4fbaddad9864a25dc35cd28ae5a91ff49c02b4733a80a75c" exitCode=0 Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.137955 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" event={"ID":"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6","Type":"ContainerDied","Data":"d2ad60f89c3b7d9e4fbaddad9864a25dc35cd28ae5a91ff49c02b4733a80a75c"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.139437 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vplc5" event={"ID":"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6","Type":"ContainerStarted","Data":"03b9cfad20d1500dd6945cf89a105191d6cf1dfa948ae573c0b79a9252f7a88a"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.141338 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-djqwl" event={"ID":"b2ce252a-14bb-4b72-b4f8-6532d0f9266e","Type":"ContainerStarted","Data":"922e70ba24ac38e77fdfb259da19d8bed0ac20f32c6e3886684685d261d3f7f4"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.156891 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" event={"ID":"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b","Type":"ContainerStarted","Data":"b3a70f30e78e7c5d61e01299b8f0f7f6c6166e2646d2c675796e356ef67d933d"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.211308 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" event={"ID":"4a057949-f846-40b0-bc8e-66c8c8d0d1d4","Type":"ContainerStarted","Data":"4e5fc17cf8191afed37d6c5ed450c78b150b0c6f5ca81db13cdd0d95ed73d05f"} Jan 30 
10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.219166 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.221391 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.721364703 +0000 UTC m=+144.271240939 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.273847 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" event={"ID":"8a9ab7f0-7df0-452e-a879-3a7344a1778f","Type":"ContainerStarted","Data":"9c83090ce53a46aef67ebdd34a94c7c720f59fb95f13d205cd39e76cbbed4244"} Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.293979 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.312006 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.321686 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.326078 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.826061055 +0000 UTC m=+144.375937291 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.359601 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-tt6p5" podStartSLOduration=123.359582576 podStartE2EDuration="2m3.359582576s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.358181683 +0000 UTC m=+143.908057779" watchObservedRunningTime="2026-01-30 10:56:33.359582576 +0000 UTC m=+143.909458642" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.424320 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.424572 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.924528225 +0000 UTC m=+144.474404291 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.424830 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.426258 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:33.926238917 +0000 UTC m=+144.476115183 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.526511 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.526988 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.026965187 +0000 UTC m=+144.576841253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.553882 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" podStartSLOduration=123.553866605 podStartE2EDuration="2m3.553866605s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.552884024 +0000 UTC m=+144.102760090" watchObservedRunningTime="2026-01-30 10:56:33.553866605 +0000 UTC m=+144.103742671" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.628955 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.629430 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.129411909 +0000 UTC m=+144.679287975 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.678063 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" podStartSLOduration=123.678042996 podStartE2EDuration="2m3.678042996s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.655268885 +0000 UTC m=+144.205144971" watchObservedRunningTime="2026-01-30 10:56:33.678042996 +0000 UTC m=+144.227919062" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.679037 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j8fvx" podStartSLOduration=123.679028966 podStartE2EDuration="2m3.679028966s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.678324314 +0000 UTC m=+144.228200400" watchObservedRunningTime="2026-01-30 10:56:33.679028966 +0000 UTC m=+144.228905032" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.718810 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" podStartSLOduration=123.718790929 podStartE2EDuration="2m3.718790929s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.71848061 +0000 UTC m=+144.268356676" watchObservedRunningTime="2026-01-30 10:56:33.718790929 +0000 UTC m=+144.268666995" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.731772 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.732288 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.232267404 +0000 UTC m=+144.782143470 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.755685 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g9lcg"] Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.761445 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" podStartSLOduration=123.761426941 podStartE2EDuration="2m3.761426941s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.760659828 +0000 UTC m=+144.310535904" watchObservedRunningTime="2026-01-30 10:56:33.761426941 +0000 UTC m=+144.311303007" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.836696 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.836945 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ctbj2" podStartSLOduration=123.836929905 podStartE2EDuration="2m3.836929905s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.836244764 +0000 UTC m=+144.386120830" watchObservedRunningTime="2026-01-30 10:56:33.836929905 +0000 UTC m=+144.386805971" Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.838313 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-g46p9" podStartSLOduration=123.838299007 podStartE2EDuration="2m3.838299007s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:33.800113322 +0000 UTC m=+144.349989408" watchObservedRunningTime="2026-01-30 10:56:33.838299007 +0000 UTC m=+144.388175073" Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.837105 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.33709031 +0000 UTC m=+144.886966376 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.864135 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9"] Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.912764 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-55vfj"] Jan 30 10:56:33 crc kubenswrapper[4869]: W0130 10:56:33.915082 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b79e0dd_2d4a_4983_9ed6_fca070b074d7.slice/crio-45b3a998bb7dd153fe78b802045300261b8e82011217f148807fe9e57d728e94 WatchSource:0}: Error finding container 45b3a998bb7dd153fe78b802045300261b8e82011217f148807fe9e57d728e94: Status 404 returned error can't find the container with id 45b3a998bb7dd153fe78b802045300261b8e82011217f148807fe9e57d728e94 Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.932814 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4"] Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.934501 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm"] Jan 30 10:56:33 crc kubenswrapper[4869]: I0130 10:56:33.956772 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:33 crc kubenswrapper[4869]: E0130 10:56:33.957138 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.457118243 +0000 UTC m=+145.006994309 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.030852 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-br4ps"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.032869 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" podStartSLOduration=124.032834573 podStartE2EDuration="2m4.032834573s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.018887254 +0000 UTC m=+144.568763330" watchObservedRunningTime="2026-01-30 10:56:34.032834573 +0000 UTC m=+144.582710639" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.053393 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-k5cpz"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.059434 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.059892 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.559877615 +0000 UTC m=+145.109753681 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.075876 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.079321 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.087532 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.095525 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tntbs"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.160314 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.160572 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.660528812 +0000 UTC m=+145.210404878 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.161009 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.161760 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.661739399 +0000 UTC m=+145.211615465 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: W0130 10:56:34.164689 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod624037a4_840b_4c6d_806b_6b0d2276328d.slice/crio-9c2820286cad1246665075e974e26eaa0988da7cd35021af77a4a319bdc7d662 WatchSource:0}: Error finding container 9c2820286cad1246665075e974e26eaa0988da7cd35021af77a4a319bdc7d662: Status 404 returned error can't find the container with id 9c2820286cad1246665075e974e26eaa0988da7cd35021af77a4a319bdc7d662 Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.263593 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.264049 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.764027047 +0000 UTC m=+145.313903123 (durationBeforeRetry 500ms). 
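
Note: the "No retries permitted until ... (durationBeforeRetry 500ms)" entries are the volume reconciler's backoff gate: after a failed operation on a given {volume, pod} pair, the same operation is refused until the deadline passes, which is why the identical error reappears roughly every half second. A self-contained sketch of such a gate; the names are made up for illustration, and kubelet's real logic (nestedpendingoperations.go, per the log) can also grow the delay rather than keeping it flat:

package main

import (
	"fmt"
	"time"
)

// opBackoff gates one named operation (e.g. a volume mount) the way
// the "No retries permitted until ..." lines describe.
type opBackoff struct {
	notBefore time.Time     // earliest permitted next attempt
	delay     time.Duration // durationBeforeRetry in the log
}

// try runs fn if the backoff window has elapsed; on failure it pushes
// the next permitted attempt out by the configured delay.
func (b *opBackoff) try(now time.Time, fn func() error) error {
	if now.Before(b.notBefore) {
		return fmt.Errorf("no retries permitted until %s", b.notBefore.Format(time.RFC3339Nano))
	}
	if err := fn(); err != nil {
		b.notBefore = now.Add(b.delay)
		return err
	}
	return nil
}

func main() {
	b := &opBackoff{delay: 500 * time.Millisecond} // matches durationBeforeRetry 500ms
	mount := func() error { return fmt.Errorf("driver not yet registered") }
	now := time.Now()
	fmt.Println(b.try(now, mount))                           // fails and arms the backoff
	fmt.Println(b.try(now.Add(100*time.Millisecond), mount)) // rejected: still inside the window
	fmt.Println(b.try(now.Add(600*time.Millisecond), mount)) // window elapsed, attempt runs again
}
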
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.279426 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.310584 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.328054 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.348115 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.360707 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.364587 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.364966 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.864954942 +0000 UTC m=+145.414831008 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.371387 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" event={"ID":"4a057949-f846-40b0-bc8e-66c8c8d0d1d4","Type":"ContainerStarted","Data":"62626957d41fced46db62d01ab258e05d51332e3f0f6e98a84fcbfb0d017102e"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.415326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" event={"ID":"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee","Type":"ContainerStarted","Data":"eb35bd78db7374a62c16fd66714e75bd0c60589622c5c942156125a6c35e883a"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.435467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" event={"ID":"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c","Type":"ContainerStarted","Data":"921ad110cf56a43bfa4f539bdb23562d7737e4296a53d12c3828d8cbb3ffb7fa"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.459435 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" event={"ID":"8a9ab7f0-7df0-452e-a879-3a7344a1778f","Type":"ContainerStarted","Data":"d52b52f56db75186b1c72bcf0232be741421dbf7258d7b257379293faebc3855"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.459654 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc"] Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.474570 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.476142 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:34.976119293 +0000 UTC m=+145.525995559 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.495139 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-djqwl" event={"ID":"b2ce252a-14bb-4b72-b4f8-6532d0f9266e","Type":"ContainerStarted","Data":"04c39e8be6507674853fb69a9ce087d1a025e462dabba9970888ac3a816f0665"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.504201 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" event={"ID":"bc978c96-efc2-4963-af6b-bd987cb81bed","Type":"ContainerStarted","Data":"7e6f1714ec145f17d46d6c2e0219c27339d69a75530658db63456d722fdc9f7c"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.504637 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.505725 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" event={"ID":"77e5e845-c1c8-4915-9b8d-d716558c2528","Type":"ContainerStarted","Data":"6228c8e0a4d058705542ffc3c88fa185f1392232bdaf5e4a492fe00d67e95b1a"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.508561 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" event={"ID":"118c5392-b197-4d57-b07f-66e6f537c4e1","Type":"ContainerStarted","Data":"e38ac21f00641c09f5636a542d6cc0b9e74773026466756f112f27dcc947e506"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.510406 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" event={"ID":"c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa","Type":"ContainerStarted","Data":"3fc62afb9745b254a6b34e56dd7d72aac3fb6790971ee1a48e198f609d5ebe76"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.511426 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" event={"ID":"ca984745-756b-429d-83d9-c7699271eaa1","Type":"ContainerStarted","Data":"52171b3921ba7aa91c5a554583c86fafb9d42305504509f1da08c36871e89ab4"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.511454 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" event={"ID":"ca984745-756b-429d-83d9-c7699271eaa1","Type":"ContainerStarted","Data":"6d31ffdd14510081822b337c09a9f2ff4edeb68fc792fcf58c5d5399ebdbc729"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.529903 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" event={"ID":"1f81e19a-bac5-4dd5-a294-1a026d0b9cd6","Type":"ContainerStarted","Data":"c55ada69adec41b7f9873df3289283d59101ab8c4b9df61b58dbe77a49c2df9a"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.534617 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" event={"ID":"d807cf30-7238-4b0b-9363-3380cee63802","Type":"ContainerStarted","Data":"d1f5d42b5663b3c6cbceb16deaabd324605fc639de4428bb8cc27def2cd88bac"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.534667 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" event={"ID":"d807cf30-7238-4b0b-9363-3380cee63802","Type":"ContainerStarted","Data":"6607c34eae2f17e735ba9f0c981f66eda00de51f2a376f8230c3b20ba72b0e5e"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.543485 4869 generic.go:334] "Generic (PLEG): container finished" podID="af5a6cd3-3f75-431f-bea7-cd3197b8fa1b" containerID="b3a70f30e78e7c5d61e01299b8f0f7f6c6166e2646d2c675796e356ef67d933d" exitCode=0 Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.543552 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" event={"ID":"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b","Type":"ContainerDied","Data":"b3a70f30e78e7c5d61e01299b8f0f7f6c6166e2646d2c675796e356ef67d933d"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.543577 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" event={"ID":"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b","Type":"ContainerStarted","Data":"17648e756d59244d147aa405278a6fe3e2d5db849848f86a5bfb9fe0c3fa6090"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.555589 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8hs7b" podStartSLOduration=124.555556257 podStartE2EDuration="2m4.555556257s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.554417012 +0000 UTC m=+145.104293098" watchObservedRunningTime="2026-01-30 10:56:34.555556257 +0000 UTC m=+145.105432323" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.556081 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" event={"ID":"b53b05e8-9e7c-48ad-9a9b-535ce038924e","Type":"ContainerStarted","Data":"f984e1920acaeab4a32e5ba27ee6c3407ec1cc53a4b0fa43f9044a017100ec12"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.586161 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.587770 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.087752438 +0000 UTC m=+145.637628504 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.604329 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" event={"ID":"e9ed3410-fb43-440e-8d7f-832850050d0c","Type":"ContainerStarted","Data":"2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.605811 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.606782 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" podStartSLOduration=124.606763413 podStartE2EDuration="2m4.606763413s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.602404729 +0000 UTC m=+145.152280795" watchObservedRunningTime="2026-01-30 10:56:34.606763413 +0000 UTC m=+145.156639479" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.608594 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-k5cpz" event={"ID":"72e2a345-8b37-45c7-a59b-3935151f1a40","Type":"ContainerStarted","Data":"8a3ffbfab8aebeb0e1ca04ed252232b3c6e6ac301b9b94a4a4925ab67d4c3cc0"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.638930 4869 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-pr488 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.639005 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" podUID="e9ed3410-fb43-440e-8d7f-832850050d0c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.639942 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n5r98" podStartSLOduration=124.639917173 podStartE2EDuration="2m4.639917173s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.623347143 +0000 UTC m=+145.173223209" watchObservedRunningTime="2026-01-30 10:56:34.639917173 +0000 UTC m=+145.189793239" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.675548 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" 
event={"ID":"92b8f00c-4b15-49a1-ac91-aef68b07cb74","Type":"ContainerStarted","Data":"268853f1a945bcf9475163415efa26f6a67b92c05232d5265791e6d37f41bdf4"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.677284 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jkrt8" podStartSLOduration=124.677266272 podStartE2EDuration="2m4.677266272s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.675221099 +0000 UTC m=+145.225097165" watchObservedRunningTime="2026-01-30 10:56:34.677266272 +0000 UTC m=+145.227142338" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.691520 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.692620 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.192592464 +0000 UTC m=+145.742468520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.697941 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" event={"ID":"a945a8a2-155f-4e1d-a636-a04711e6e40c","Type":"ContainerStarted","Data":"86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.698727 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.702107 4869 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-dqfj8 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body= Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.702141 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" podUID="a945a8a2-155f-4e1d-a636-a04711e6e40c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.705855 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qgkdr" 
event={"ID":"7d256fa3-7889-415c-8e01-0b43802365a5","Type":"ContainerStarted","Data":"b472ee2b3b7fe5ec31d47640f9353a57a996fbb0e9a110d936ed389788563fb9"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.709606 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xr2sj" podStartSLOduration=124.709587217 podStartE2EDuration="2m4.709587217s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.708784892 +0000 UTC m=+145.258660978" watchObservedRunningTime="2026-01-30 10:56:34.709587217 +0000 UTC m=+145.259463283" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.722570 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" event={"ID":"44c5913f-a9a0-4b9f-aa60-c6158d19a38a","Type":"ContainerStarted","Data":"4940af762021649fe2a2f10dab03be9c7650594d14e6eb542d31f2dfe45d3964"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.723265 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.735674 4869 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rkwmf container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.735756 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.740221 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" podStartSLOduration=124.740200539 podStartE2EDuration="2m4.740200539s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.737704202 +0000 UTC m=+145.287580268" watchObservedRunningTime="2026-01-30 10:56:34.740200539 +0000 UTC m=+145.290076635" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.761032 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" event={"ID":"04600a3e-ea6a-4828-bf49-4f97a92f2f4d","Type":"ContainerStarted","Data":"ad3182487515580a7539e02a5087fca4ad3fcd17eb6c87629b1ec819ada36d31"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.777291 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" podStartSLOduration=124.777270209 podStartE2EDuration="2m4.777270209s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.775896707 +0000 UTC m=+145.325772793" watchObservedRunningTime="2026-01-30 10:56:34.777270209 +0000 UTC 
m=+145.327146275" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.798773 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.821746 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.30069368 +0000 UTC m=+145.850569736 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.825620 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8khhj" podStartSLOduration=124.825602487 podStartE2EDuration="2m4.825602487s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.824014808 +0000 UTC m=+145.373890874" watchObservedRunningTime="2026-01-30 10:56:34.825602487 +0000 UTC m=+145.375478553" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.826935 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-dvjw7" event={"ID":"083c9bb4-c4ed-4217-bff5-3babe25ac772","Type":"ContainerStarted","Data":"a524ea6d4e4470213ee901c15f67129be5dee8dd24a00d128d15882d4d63ee58"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.864666 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vplc5" event={"ID":"780c0d74-7c0b-4fd0-9ed9-23fdca188bf6","Type":"ContainerStarted","Data":"a412f96483d0bcbe32575e2770c76496e8b392326fdc3be3add5b503cf73800f"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.882632 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ndht4" podStartSLOduration=124.882616121 podStartE2EDuration="2m4.882616121s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.880141095 +0000 UTC m=+145.430017161" watchObservedRunningTime="2026-01-30 10:56:34.882616121 +0000 UTC m=+145.432492187" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.912989 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" podStartSLOduration=124.912972335 podStartE2EDuration="2m4.912972335s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.911299914 +0000 UTC m=+145.461175990" watchObservedRunningTime="2026-01-30 10:56:34.912972335 +0000 UTC m=+145.462848401" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.916699 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:34 crc kubenswrapper[4869]: E0130 10:56:34.917920 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.417904187 +0000 UTC m=+145.967780253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.945220 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-vplc5" podStartSLOduration=6.945202867 podStartE2EDuration="6.945202867s" podCreationTimestamp="2026-01-30 10:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.943556226 +0000 UTC m=+145.493432292" watchObservedRunningTime="2026-01-30 10:56:34.945202867 +0000 UTC m=+145.495078933" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.950413 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" event={"ID":"624037a4-840b-4c6d-806b-6b0d2276328d","Type":"ContainerStarted","Data":"9c2820286cad1246665075e974e26eaa0988da7cd35021af77a4a319bdc7d662"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.960205 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" event={"ID":"4b79e0dd-2d4a-4983-9ed6-fca070b074d7","Type":"ContainerStarted","Data":"45b3a998bb7dd153fe78b802045300261b8e82011217f148807fe9e57d728e94"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.978487 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-dvjw7" podStartSLOduration=124.97846467 podStartE2EDuration="2m4.97846467s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:34.975301073 +0000 UTC m=+145.525177149" watchObservedRunningTime="2026-01-30 10:56:34.97846467 +0000 UTC m=+145.528340736" Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.987650 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" 
event={"ID":"2ca2af71-e5ad-4800-9d2c-d79a15a031ad","Type":"ContainerStarted","Data":"fd23177375665835dee3299a8ae2bc1311c979c4f2b6d0f8f45d2ae0d70060e8"} Jan 30 10:56:34 crc kubenswrapper[4869]: I0130 10:56:34.994111 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" event={"ID":"5bba43c5-f7f3-4939-a9a7-f191927f7d64","Type":"ContainerStarted","Data":"c61f1cf8a94f2a3d7ff8f99a2914860296f453c6bbe6d4fc142d1a376456da72"} Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.020615 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.021192 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.521172324 +0000 UTC m=+146.071048580 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.027636 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z8qjp" event={"ID":"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2","Type":"ContainerStarted","Data":"ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69"} Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.034235 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" event={"ID":"7adee160-9300-4ae8-b89e-c9a939b4f354","Type":"ContainerStarted","Data":"b5271dae4d2fab85f2737fac31504358b0d90fc66292125a5e109dcae1bea6b7"} Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.035272 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-g46p9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.035317 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g46p9" podUID="054a5a6b-1556-42a3-a4bb-1c25470226cc" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.059049 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-cdspc" podStartSLOduration=125.059032129 podStartE2EDuration="2m5.059032129s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2026-01-30 10:56:35.001253051 +0000 UTC m=+145.551129117" watchObservedRunningTime="2026-01-30 10:56:35.059032129 +0000 UTC m=+145.608908195" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.063004 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs2xw" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.067048 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" podStartSLOduration=125.067020785 podStartE2EDuration="2m5.067020785s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:35.058537214 +0000 UTC m=+145.608413300" watchObservedRunningTime="2026-01-30 10:56:35.067020785 +0000 UTC m=+145.616896851" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.101536 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-xdsnd" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.170469 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-z8qjp" podStartSLOduration=125.170444748 podStartE2EDuration="2m5.170444748s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:35.082782 +0000 UTC m=+145.632658066" watchObservedRunningTime="2026-01-30 10:56:35.170444748 +0000 UTC m=+145.720320834" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.171776 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.172155 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.672111239 +0000 UTC m=+146.221987315 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.172695 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.180934 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.68091539 +0000 UTC m=+146.230791456 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.274054 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.275740 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.775701006 +0000 UTC m=+146.325577072 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.276127 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.276639 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.776623005 +0000 UTC m=+146.326499071 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.377539 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.378319 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.878299353 +0000 UTC m=+146.428175429 (durationBeforeRetry 500ms). 
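
Note: from here to roughly 10:56:36 the same TearDown/MountDevice pair fails every 500ms, so the signal is one fact (driver not yet registered) repeated dozens of times. When triaging a log like this it can help to collapse the storm into a count plus first and last occurrence; a small stdlib filter, with the file name as a placeholder:

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// Count occurrences of the CSI registration error in a kubelet log and
// keep the first and last matching lines.
func main() {
	f, err := os.Open("kubelet.log") // placeholder path
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	const needle = "not found in the list of registered CSI drivers"
	var count int
	var first, last string
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines can be very long
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, needle) {
			continue
		}
		if count == 0 {
			first = line
		}
		last = line
		count++
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
	fmt.Printf("%d occurrences\nfirst: %.120s\nlast:  %.120s\n", count, first, last)
}
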
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.483457 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.483787 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:35.983775389 +0000 UTC m=+146.533651455 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.584216 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.584963 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.084944882 +0000 UTC m=+146.634820948 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.609887 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.614191 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:35 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:35 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:35 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.614243 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.686129 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.686497 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.186485886 +0000 UTC m=+146.736361942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.786879 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.787077 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.287046671 +0000 UTC m=+146.836922747 (durationBeforeRetry 500ms). 
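
Note: the router startup-probe output above ("[-]backend-http failed: reason withheld", "[-]has-synced failed: reason withheld", "[+]process-running ok", "healthz check failed") is the conventional aggregated-healthz format: each named check reports pass or fail, and a single failure turns the whole endpoint into an HTTP 500, which the kubelet records as "HTTP probe failed with statuscode: 500". A minimal handler in that style; the check names mirror the log but the wiring is illustrative:

package main

import (
	"fmt"
	"net/http"
)

// healthz aggregates named checks in the bracketed format seen in the
// probe output; any failing check makes the response a 500. Map
// iteration order is unspecified; a real aggregator keeps a stable order.
func healthz(checks map[string]func() error) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, failed := "", false
		for name, check := range checks {
			if err := check(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError)
			fmt.Fprint(w, body+"healthz check failed")
			return
		}
		fmt.Fprint(w, body+"ok")
	}
}

func main() {
	http.HandleFunc("/healthz", healthz(map[string]func() error{
		"process-running": func() error { return nil },
		"backend-http":    func() error { return fmt.Errorf("not ready") }, // fails, like the router's check
	}))
	fmt.Println(http.ListenAndServe(":8080", nil)) // illustrative port
}
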
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.787382 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.787767 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.287755093 +0000 UTC m=+146.837631159 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.888237 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.888452 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.38841992 +0000 UTC m=+146.938295986 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.888585 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.888924 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.388909615 +0000 UTC m=+146.938785681 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.938060 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.938107 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.946552 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.990096 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.990283 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.490255804 +0000 UTC m=+147.040131870 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:35 crc kubenswrapper[4869]: I0130 10:56:35.990400 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:35 crc kubenswrapper[4869]: E0130 10:56:35.990928 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.490920284 +0000 UTC m=+147.040796350 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.042132 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" event={"ID":"77e5e845-c1c8-4915-9b8d-d716558c2528","Type":"ContainerStarted","Data":"654c466feee1fe6f8d48f6f2af791c3b890bcee83648c6212f8b33b664aff035"} Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.046574 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" event={"ID":"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0","Type":"ContainerStarted","Data":"92547c43e2be563a302b1a42e9d95ea768b3d7d334bac5e32d690ab3206a5978"} Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.046631 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" event={"ID":"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0","Type":"ContainerStarted","Data":"1b4e6746c93686e48f06b6d06ce970a81d52550ca4ea9c1171cc03f50163366e"} Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.046648 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" event={"ID":"2bcc963a-ffa6-45fb-a7c3-c83b891b74e0","Type":"ContainerStarted","Data":"ccf3c0b2eb1afc382b7ad5abe939ea7b6cd0e9fdd86ebe03f3ab1ef7906cd982"} Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.048883 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" event={"ID":"7adee160-9300-4ae8-b89e-c9a939b4f354","Type":"ContainerStarted","Data":"280b7bd3a1f86dc44a0cbb5c6cbfc281bdf5043c994726ef52dbcf306d7dcc5b"} Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.048937 4869 kubelet.go:2453] "SyncLoop 
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.048937 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" event={"ID":"7adee160-9300-4ae8-b89e-c9a939b4f354","Type":"ContainerStarted","Data":"b0e406057f618e2d8927d0b65cff005829319255c8254a1db4de96583ca0cb28"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.050899 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" event={"ID":"37701f0a-dcf3-407e-9331-76e8c8cd871e","Type":"ContainerStarted","Data":"9fa785ba3c136cae215610ac707023d7c463a7d21576dd66616026382222a735"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.050954 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" event={"ID":"37701f0a-dcf3-407e-9331-76e8c8cd871e","Type":"ContainerStarted","Data":"cf8be9e9c539d2b962b379deef28898a3e8bdf12e4cc427edf0394ff4e891c8c"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.052508 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" event={"ID":"b53b05e8-9e7c-48ad-9a9b-535ce038924e","Type":"ContainerStarted","Data":"9580a2893427f99bf485ac5b5f4cf3c734d005183a77d94b85c32cf859178031"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.052740 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.054425 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-k5cpz" event={"ID":"72e2a345-8b37-45c7-a59b-3935151f1a40","Type":"ContainerStarted","Data":"2b0dcae7312284259c3f95bd9f0137a76a58b814e3cd1f3c4e0c5a3c2de2f86c"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.054638 4869 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-2frn4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" start-of-body=
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.054696 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" podUID="b53b05e8-9e7c-48ad-9a9b-535ce038924e" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.057432 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" event={"ID":"5bba43c5-f7f3-4939-a9a7-f191927f7d64","Type":"ContainerStarted","Data":"fe2476e85f4e25a2ffb8358e5d45ab07541774a3428a1aab5de75a2862c0f1ec"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.057472 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" event={"ID":"5bba43c5-f7f3-4939-a9a7-f191927f7d64","Type":"ContainerStarted","Data":"5d9fe809cd06b42daa45a0797d9dbd4f3b4c92abab86a0489fd3527cb99de9fb"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.059331 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" event={"ID":"73188a34-7fd8-4169-8b59-693f746607ed","Type":"ContainerStarted","Data":"2c3c941eac80af12800427d220de1cbd30c8b671ad65f317b89f8d4c51948445"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.059375 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" event={"ID":"73188a34-7fd8-4169-8b59-693f746607ed","Type":"ContainerStarted","Data":"573081ecb1aa0fb6ed2aa30016a85fdc770077b3819da5d828653a25f6fdfc4c"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.059389 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" event={"ID":"73188a34-7fd8-4169-8b59-693f746607ed","Type":"ContainerStarted","Data":"48f02a3addd66ab9874675e8512b4c1a7579f8a85302fd33d8a2e3c7851a76d5"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.061385 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" event={"ID":"af5a6cd3-3f75-431f-bea7-cd3197b8fa1b","Type":"ContainerStarted","Data":"ffbf3276bdc01bc02879fb340deeb9d313cdc849bfe36bb2e88a27c43bcc5e08"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.064601 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" event={"ID":"c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa","Type":"ContainerStarted","Data":"ce1e9f7a080e20cfdda3420feb5215a12888ed7ce3c4b0ff715dd4c563d1564c"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.064644 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" event={"ID":"c8c818c5-31ab-4eff-88c7-a2f73ffe6eaa","Type":"ContainerStarted","Data":"38613d09fa1522be6038dff1e2dfcc073c26727b372cf1634b68e0c9bc7b61ff"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.066220 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qfdm9" event={"ID":"4b79e0dd-2d4a-4983-9ed6-fca070b074d7","Type":"ContainerStarted","Data":"7887792ab7535d0f43d21815a91aa58450f50b9f70071c3cb91009804f6eb7de"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.067870 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" event={"ID":"df4da501-b52b-426b-9e7a-e3b62240c111","Type":"ContainerStarted","Data":"48d97532fee50a8a1b660fe367cd81c93931d7504971f8bd001a712e76cbf35f"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.067922 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" event={"ID":"df4da501-b52b-426b-9e7a-e3b62240c111","Type":"ContainerStarted","Data":"9470214bc9bf23b63e13fbe4e5d04fb8638df413fefe749164da9f3f16a99cda"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.068108 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.069363 4869 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-xxd67 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body=
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.069421 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" podUID="df4da501-b52b-426b-9e7a-e3b62240c111" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.071132 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-djqwl" event={"ID":"b2ce252a-14bb-4b72-b4f8-6532d0f9266e","Type":"ContainerStarted","Data":"e4fcfe47b01908fd10a868e50e57d1e6fccff65a3f845ffd37fd87d326df54df"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.071285 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-djqwl"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.072780 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" event={"ID":"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee","Type":"ContainerStarted","Data":"174e749d25e5685626b570167d7c3bfaaae814b2b43c2b058404abb9ef4f98a8"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.072816 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" event={"ID":"7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee","Type":"ContainerStarted","Data":"44dc90cd97d394979436c9e89f0f4eda2bd8e3b87d547d69623b43faeb99beea"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.073461 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.074652 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" event={"ID":"2ca2af71-e5ad-4800-9d2c-d79a15a031ad","Type":"ContainerStarted","Data":"3d819952bfcc27b89ee879fadfeef05f208c75f0464b37f0ee6ab6567d696153"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.076158 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" event={"ID":"624037a4-840b-4c6d-806b-6b0d2276328d","Type":"ContainerStarted","Data":"79907ccc904a8e91f6ae91d0c6026a416f3e793ffd96165294ae8f767ea96913"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.080268 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" event={"ID":"1e366e0f-3cc0-4742-9edf-28e5257e9310","Type":"ContainerStarted","Data":"73b3e6d96aa3d0dfd8e084454d223e7bce0a3314e2cefec7fa13c495fc0f3fce"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.080311 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" event={"ID":"1e366e0f-3cc0-4742-9edf-28e5257e9310","Type":"ContainerStarted","Data":"953e22caf85c80f81ee0951c02c5a9c05c543748fd979ad86fd37adf6c4f8f72"}
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.082468 4869 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rkwmf container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body=
pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.091453 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.092792 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.592760988 +0000 UTC m=+147.142637054 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.094295 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z5mpb" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.105692 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.123497 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vxcwv" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.134008 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-55vfj" podStartSLOduration=126.133977545 podStartE2EDuration="2m6.133977545s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.131271392 +0000 UTC m=+146.681147458" watchObservedRunningTime="2026-01-30 10:56:36.133977545 +0000 UTC m=+146.683853611" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.194478 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.207877 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.707843658 +0000 UTC m=+147.257719724 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.209237 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" podStartSLOduration=126.2092224 podStartE2EDuration="2m6.2092224s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.195671773 +0000 UTC m=+146.745547839" watchObservedRunningTime="2026-01-30 10:56:36.2092224 +0000 UTC m=+146.759098466" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.279217 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.279668 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.284869 4869 patch_prober.go:28] interesting pod/apiserver-76f77b778f-tnth8 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.284924 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" podUID="af5a6cd3-3f75-431f-bea7-cd3197b8fa1b" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.293994 4869 csr.go:261] certificate signing request csr-88tcs is approved, waiting to be issued Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.297246 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.298114 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.798086065 +0000 UTC m=+147.347962131 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.307317 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.307683 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.807670429 +0000 UTC m=+147.357546495 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.326930 4869 csr.go:257] certificate signing request csr-88tcs is issued Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.366978 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-br4ps" podStartSLOduration=126.366963364 podStartE2EDuration="2m6.366963364s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.364939352 +0000 UTC m=+146.914815438" watchObservedRunningTime="2026-01-30 10:56:36.366963364 +0000 UTC m=+146.916839430" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.408439 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.408855 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:36.908836722 +0000 UTC m=+147.458712788 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.448977 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" podStartSLOduration=126.448959687 podStartE2EDuration="2m6.448959687s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.416397805 +0000 UTC m=+146.966273881" watchObservedRunningTime="2026-01-30 10:56:36.448959687 +0000 UTC m=+146.998835753" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.477463 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s22tp" podStartSLOduration=126.477446744 podStartE2EDuration="2m6.477446744s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.451877097 +0000 UTC m=+147.001753163" watchObservedRunningTime="2026-01-30 10:56:36.477446744 +0000 UTC m=+147.027322810" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.479083 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9lcg" podStartSLOduration=126.479075064 podStartE2EDuration="2m6.479075064s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.469679415 +0000 UTC m=+147.019555471" watchObservedRunningTime="2026-01-30 10:56:36.479075064 +0000 UTC m=+147.028951130" Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.510493 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.510845 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.010830671 +0000 UTC m=+147.560706737 (durationBeforeRetry 500ms). 
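Note: the pod_startup_latency_tracker entries are internally consistent: each operator pod was created at 2026-01-30 10:54:30 and first observed running around 10:56:36, so podStartE2EDuration comes out at roughly 126 seconds, printed as "2m6.…s". A quick check against the etcd-operator entry above (timestamps copied from the log):

    // Sketch: reproducing podStartE2EDuration from the two timestamps the
    // tracker logs for etcd-operator-b45778765-55vfj.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        layout := "2006-01-02 15:04:05.999999999 -0700 MST"
        created, _ := time.Parse(layout, "2026-01-30 10:54:30 +0000 UTC")
        observed, _ := time.Parse(layout, "2026-01-30 10:56:36.133977545 +0000 UTC")
        // Prints 2m6.133977545s, i.e. podStartSLOduration=126.133977545.
        fmt.Println(observed.Sub(created))
    }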
Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.510845 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.010830671 +0000 UTC m=+147.560706737 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.550236 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-grv7k" podStartSLOduration=126.550213673 podStartE2EDuration="2m6.550213673s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.507262881 +0000 UTC m=+147.057138947" watchObservedRunningTime="2026-01-30 10:56:36.550213673 +0000 UTC m=+147.100089739"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.552151 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" podStartSLOduration=126.552138962 podStartE2EDuration="2m6.552138962s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.550154831 +0000 UTC m=+147.100030897" watchObservedRunningTime="2026-01-30 10:56:36.552138962 +0000 UTC m=+147.102015028"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.571030 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.611514 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-k5cpz" podStartSLOduration=8.611495878 podStartE2EDuration="8.611495878s" podCreationTimestamp="2026-01-30 10:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.585285912 +0000 UTC m=+147.135161988" watchObservedRunningTime="2026-01-30 10:56:36.611495878 +0000 UTC m=+147.161371944"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.612456 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.612856 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.11284301 +0000 UTC m=+147.662719076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.627208 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 10:56:36 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld
Jan 30 10:56:36 crc kubenswrapper[4869]: [+]process-running ok
Jan 30 10:56:36 crc kubenswrapper[4869]: healthz check failed
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.627268 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.657270 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4z8pc" podStartSLOduration=126.657252706 podStartE2EDuration="2m6.657252706s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.655834953 +0000 UTC m=+147.205711019" watchObservedRunningTime="2026-01-30 10:56:36.657252706 +0000 UTC m=+147.207128792"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.713895 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.714228 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.214216839 +0000 UTC m=+147.764092905 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.752190 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-djqwl" podStartSLOduration=8.752177797 podStartE2EDuration="8.752177797s" podCreationTimestamp="2026-01-30 10:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.751690162 +0000 UTC m=+147.301566228" watchObservedRunningTime="2026-01-30 10:56:36.752177797 +0000 UTC m=+147.302053863"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.752740 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67" podStartSLOduration=126.752735984 podStartE2EDuration="2m6.752735984s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.698615729 +0000 UTC m=+147.248491795" watchObservedRunningTime="2026-01-30 10:56:36.752735984 +0000 UTC m=+147.302612050"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.797168 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-fn57m" podStartSLOduration=126.797149291 podStartE2EDuration="2m6.797149291s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.795456429 +0000 UTC m=+147.345332495" watchObservedRunningTime="2026-01-30 10:56:36.797149291 +0000 UTC m=+147.347025357"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.814484 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.814680 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.31465523 +0000 UTC m=+147.864531296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.814819 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.815100 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.315087513 +0000 UTC m=+147.864963579 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.826588 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mx9wm" podStartSLOduration=126.826558096 podStartE2EDuration="2m6.826558096s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.823806541 +0000 UTC m=+147.373682607" watchObservedRunningTime="2026-01-30 10:56:36.826558096 +0000 UTC m=+147.376434162"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.851400 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rwgbc" podStartSLOduration=126.85137692 podStartE2EDuration="2m6.85137692s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.849914495 +0000 UTC m=+147.399790571" watchObservedRunningTime="2026-01-30 10:56:36.85137692 +0000 UTC m=+147.401252986"
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.916127 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.916231 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.416211525 +0000 UTC m=+147.966087591 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.916549 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:36 crc kubenswrapper[4869]: E0130 10:56:36.916808 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.416799533 +0000 UTC m=+147.966675599 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:36 crc kubenswrapper[4869]: I0130 10:56:36.923218 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4" podStartSLOduration=126.9232023 podStartE2EDuration="2m6.9232023s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:36.921957281 +0000 UTC m=+147.471833347" watchObservedRunningTime="2026-01-30 10:56:36.9232023 +0000 UTC m=+147.473078366"
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.017290 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
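Note: each failed volume operation above is re-queued with durationBeforeRetry 500ms and a "No retries permitted until" deadline; nestedpendingoperations refuses to start another operation for the same volume key before that deadline, which is why the identical mount and unmount errors recur on a steady half-second cadence throughout this window. A minimal sketch of such a per-key retry gate (structure assumed for illustration; the kubelet's real implementation also backs the delay off exponentially per operation):

    // Sketch: a per-key retry gate in the spirit of the kubelet's
    // nestedpendingoperations "No retries permitted until" deadlines.
    package main

    import (
        "fmt"
        "sync"
        "time"
    )

    type retryGate struct {
        mu        sync.Mutex
        notBefore map[string]time.Time
    }

    func (g *retryGate) tryRun(key string, op func() error) {
        g.mu.Lock()
        if t, ok := g.notBefore[key]; ok && time.Now().Before(t) {
            g.mu.Unlock()
            fmt.Printf("%s: no retries permitted until %s\n", key, t.Format(time.RFC3339Nano))
            return
        }
        g.mu.Unlock()
        if err := op(); err != nil {
            g.mu.Lock()
            // Fixed 500ms mirrors the durationBeforeRetry in this log.
            g.notBefore[key] = time.Now().Add(500 * time.Millisecond)
            g.mu.Unlock()
            fmt.Printf("%s failed: %v\n", key, err)
        }
    }

    func main() {
        g := &retryGate{notBefore: map[string]time.Time{}}
        key := "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db"
        op := func() error { return fmt.Errorf("driver not registered") }
        g.tryRun(key, op)
        g.tryRun(key, op) // rejected: still inside the 500ms window
        time.Sleep(600 * time.Millisecond)
        g.tryRun(key, op) // allowed again once the deadline has passed
    }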
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.017615 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.517599964 +0000 UTC m=+148.067476020 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.093027 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" event={"ID":"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c","Type":"ContainerStarted","Data":"ba92d8f1cc02006c263d75eaa8ad3240e82b49b98c6694206cc2b4368a1942aa"}
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.111416 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xxd67"
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.121436 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.132818 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.632799969 +0000 UTC m=+148.182676035 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.240233 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.240670 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.740651408 +0000 UTC m=+148.290527474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.328403 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-30 10:51:36 +0000 UTC, rotation deadline is 2026-11-01 21:14:56.464034424 +0000 UTC
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.328440 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6610h18m19.135596825s for next certificate rotation
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.341414 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.341797 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.84178248 +0000 UTC m=+148.391658556 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.442446 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.442911 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:37.942891831 +0000 UTC m=+148.492767897 (durationBeforeRetry 500ms).
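Note: the certificate_manager lines also check out: from the wall clock at 2026-01-30 10:56:37 to the rotation deadline of 2026-11-01 21:14:56 is 275 days plus 10h18m19s, i.e. the 6610h18m19s the kubelet says it will wait, comfortably ahead of the 2027-01-30 expiry. Verified with the timestamps copied from the log:

    // Sketch: checking the kubelet's "Waiting 6610h18m19.135596825s" figure
    // against the two timestamps it logged.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        layout := "2006-01-02 15:04:05.999999999 -0700 MST"
        now, _ := time.Parse(layout, "2026-01-30 10:56:37.328440 +0000 UTC")
        deadline, _ := time.Parse(layout, "2026-11-01 21:14:56.464034424 +0000 UTC")
        fmt.Println(deadline.Sub(now)) // ~6610h18m19.1s, matching the logged wait
    }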
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.442911 (continued) Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.543790 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.544093 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.044078754 +0000 UTC m=+148.593954820 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.592544 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2frn4"
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.619102 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 10:56:37 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld
Jan 30 10:56:37 crc kubenswrapper[4869]: [+]process-running ok
Jan 30 10:56:37 crc kubenswrapper[4869]: healthz check failed
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.619226 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.645223 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.645454 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.145430033 +0000 UTC m=+148.695306089 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.645654 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.645968 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.145955659 +0000 UTC m=+148.695831725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.747363 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.747575 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.247523974 +0000 UTC m=+148.797400030 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.747777 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.748151 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.248136133 +0000 UTC m=+148.798012199 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.849599 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.849821 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.349793481 +0000 UTC m=+148.899669547 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.850015 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.850362 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.350348338 +0000 UTC m=+148.900224404 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.951223 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.951363 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.451341636 +0000 UTC m=+149.001217702 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:37 crc kubenswrapper[4869]: I0130 10:56:37.951481 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:37 crc kubenswrapper[4869]: E0130 10:56:37.951800 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.45179219 +0000 UTC m=+149.001668256 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.052472 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.052734 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.552683124 +0000 UTC m=+149.102559200 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.052788 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.052985 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.053052 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.053170 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.553157299 +0000 UTC m=+149.103033355 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.053981 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.068466 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.125627 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" event={"ID":"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c","Type":"ContainerStarted","Data":"d2e6e07b5b18c8e9b8650303594f11ef4c947f502439c77cab8f8af156a92b9a"}
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.125687 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" event={"ID":"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c","Type":"ContainerStarted","Data":"41abde4837be2de0fa2ca309750b852ac4aa9879a8e656aefa36e0e882c7dddf"}
Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.154420 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.154746 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.654676383 +0000 UTC m=+149.204552449 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.154927 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.155033 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.155156 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.156094 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.656078496 +0000 UTC m=+149.205954562 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.171378 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.171876 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.172192 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.254632 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.256322 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.256471 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.756450934 +0000 UTC m=+149.306327000 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.256672 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.258590 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.75857609 +0000 UTC m=+149.308452156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.331922 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m7nxw"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.333321 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.342291 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.345193 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m7nxw"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.357987 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.358505 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.858487634 +0000 UTC m=+149.408363700 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.454733 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.459442 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-utilities\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.459496 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.459516 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-catalog-content\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.459613 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmh7t\" (UniqueName: \"kubernetes.io/projected/d28fe085-7a0e-4de6-8579-88e9583b87a3-kube-api-access-nmh7t\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.459844 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:38.959832453 +0000 UTC m=+149.509708519 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.546534 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gcxdk"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.547443 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.549883 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.560931 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.560965 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.060945774 +0000 UTC m=+149.610821840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.561070 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-utilities\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.561121 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.561165 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-catalog-content\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.561200 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmh7t\" (UniqueName: \"kubernetes.io/projected/d28fe085-7a0e-4de6-8579-88e9583b87a3-kube-api-access-nmh7t\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.561546 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gcxdk"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.561758 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-utilities\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.561881 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.061866102 +0000 UTC m=+149.611742168 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.561994 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-catalog-content\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.611792 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmh7t\" (UniqueName: \"kubernetes.io/projected/d28fe085-7a0e-4de6-8579-88e9583b87a3-kube-api-access-nmh7t\") pod \"certified-operators-m7nxw\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") " pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.625987 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:38 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:38 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:38 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.626049 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.649276 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.663743 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.663901 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-catalog-content\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.663928 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-utilities\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.663977 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kch96\" (UniqueName: \"kubernetes.io/projected/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-kube-api-access-kch96\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.664105 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.164091558 +0000 UTC m=+149.713967624 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.741789 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gg5f8"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.744758 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.787281 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-catalog-content\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.787327 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-utilities\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.787398 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.787417 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kch96\" (UniqueName: \"kubernetes.io/projected/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-kube-api-access-kch96\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.788106 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-catalog-content\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.788326 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-utilities\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.788545 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.288534287 +0000 UTC m=+149.838410353 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.791223 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gg5f8"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.871663 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kch96\" (UniqueName: \"kubernetes.io/projected/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-kube-api-access-kch96\") pod \"community-operators-gcxdk\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") " pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.894200 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.894435 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-utilities\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.894490 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zcwq\" (UniqueName: \"kubernetes.io/projected/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-kube-api-access-4zcwq\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.894514 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-catalog-content\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:38 crc kubenswrapper[4869]: E0130 10:56:38.894632 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.394616561 +0000 UTC m=+149.944492627 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.917150 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mvwzr"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.922571 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.944205 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mvwzr"] Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.999505 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zcwq\" (UniqueName: \"kubernetes.io/projected/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-kube-api-access-4zcwq\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.999539 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkx4w\" (UniqueName: \"kubernetes.io/projected/0229a79c-920d-482f-ab9f-92b042caee36-kube-api-access-wkx4w\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.999563 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-catalog-content\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.999607 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-utilities\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.999651 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-utilities\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.999871 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-catalog-content\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:38 crc kubenswrapper[4869]: I0130 10:56:38.999892 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.000169 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.500155499 +0000 UTC m=+150.050031575 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.001508 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-catalog-content\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.001800 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-utilities\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.094538 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zcwq\" (UniqueName: \"kubernetes.io/projected/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-kube-api-access-4zcwq\") pod \"certified-operators-gg5f8\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.095539 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.101003 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.101163 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-catalog-content\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.101222 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkx4w\" (UniqueName: \"kubernetes.io/projected/0229a79c-920d-482f-ab9f-92b042caee36-kube-api-access-wkx4w\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.101265 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-utilities\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.101608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-utilities\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.101673 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.601658672 +0000 UTC m=+150.151534738 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.101896 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-catalog-content\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.165787 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.168204 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkx4w\" (UniqueName: \"kubernetes.io/projected/0229a79c-920d-482f-ab9f-92b042caee36-kube-api-access-wkx4w\") pod \"community-operators-mvwzr\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.180893 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" event={"ID":"c6d4c3d5-598b-48a3-8a46-3d4997a4e67c","Type":"ContainerStarted","Data":"964585e79e2a61857a7273e865c88147462bcbd47d6d8ed87890b9e68946e955"} Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.206435 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.206732 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.706720345 +0000 UTC m=+150.256596411 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.258283 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.313975 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.315408 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.815388298 +0000 UTC m=+150.365264374 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.355319 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-tntbs" podStartSLOduration=11.355295756 podStartE2EDuration="11.355295756s" podCreationTimestamp="2026-01-30 10:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:39.259102896 +0000 UTC m=+149.808978962" watchObservedRunningTime="2026-01-30 10:56:39.355295756 +0000 UTC m=+149.905171822" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.419594 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.419956 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:39.919941096 +0000 UTC m=+150.469817162 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.521394 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.521541 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.021517181 +0000 UTC m=+150.571393247 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.521697 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.521984 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.021976545 +0000 UTC m=+150.571852611 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.612812 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:39 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:39 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:39 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.612867 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.623194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.623479 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.123465528 +0000 UTC m=+150.673341594 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.624160 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gg5f8"] Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.725309 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.726010 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.225995822 +0000 UTC m=+150.775871888 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.741140 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.741924 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.747140 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.747382 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.758591 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m7nxw"] Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.759892 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.807994 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gcxdk"] Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.831184 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.831397 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.331375044 +0000 UTC m=+150.881251110 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.831518 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.831642 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.831675 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.831991 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.331974613 +0000 UTC m=+150.881850679 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.936281 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.937298 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.437271693 +0000 UTC m=+150.987147759 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.939095 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.939678 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.939845 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.940844 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:39 crc kubenswrapper[4869]: E0130 10:56:39.946652 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.446522658 +0000 UTC m=+150.996398724 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.967479 4869 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 30 10:56:39 crc kubenswrapper[4869]: I0130 10:56:39.974624 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.021903 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mvwzr"] Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.041724 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:40 crc kubenswrapper[4869]: E0130 10:56:40.041958 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.541937854 +0000 UTC m=+151.091813920 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.042038 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:40 crc kubenswrapper[4869]: E0130 10:56:40.042421 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.542405598 +0000 UTC m=+151.092281664 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:40 crc kubenswrapper[4869]: W0130 10:56:40.056406 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0229a79c_920d_482f_ab9f_92b042caee36.slice/crio-11e7009314ac18bc2201cf32d19673fef90d50c52af2779f430032d766895ce5 WatchSource:0}: Error finding container 11e7009314ac18bc2201cf32d19673fef90d50c52af2779f430032d766895ce5: Status 404 returned error can't find the container with id 11e7009314ac18bc2201cf32d19673fef90d50c52af2779f430032d766895ce5 Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.066282 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.143145 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:40 crc kubenswrapper[4869]: E0130 10:56:40.143411 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.643395966 +0000 UTC m=+151.193272032 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.193269 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"fc1687e468898862b77b281439e7e795a6f2e49e2155ba0bd5ba4409aa192e26"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.193578 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b518c069109fe9b1710bf1ee6d4a53470c4a21901b101e9bef91563179c00a65"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.193868 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.195335 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fce5c75bfbb1cb1f6b707b3af639c90b52f6f5ec4a671b13948200242dcb3dde"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.195358 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2a1f6c3729137a87744f5c83fab34a4b3d51b0e8f7bca7cedd4620aa70780c58"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.197948 4869 generic.go:334] "Generic (PLEG): container finished" podID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerID="296db95de69b512c57293facb3afb31c9828e48b2c80c1d69d0a717381ff9469" exitCode=0 Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.197992 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gcxdk" event={"ID":"18c7bbb6-bdf6-4e26-9670-49a30b7dab22","Type":"ContainerDied","Data":"296db95de69b512c57293facb3afb31c9828e48b2c80c1d69d0a717381ff9469"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.198009 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gcxdk" event={"ID":"18c7bbb6-bdf6-4e26-9670-49a30b7dab22","Type":"ContainerStarted","Data":"feaf8db97665ad2d45ddf0b3158cedacbd035581c14e79ebe8ba3dcd181914db"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.201032 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.203509 4869 generic.go:334] "Generic (PLEG): container finished" podID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerID="c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a" exitCode=0 Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.203601 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gg5f8" 
event={"ID":"46b8829e-b1f9-4b12-bcbf-da01b6ef5840","Type":"ContainerDied","Data":"c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.203649 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gg5f8" event={"ID":"46b8829e-b1f9-4b12-bcbf-da01b6ef5840","Type":"ContainerStarted","Data":"224ee287aff00e076cab51cd4c324fc0e31a06a722cc37f5fcd5696d50831f88"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.214414 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f440ec0773c13a1b505636f5f8e6ddfa3adf36984fbb904c3f95ffc161c2c2db"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.214452 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"acc14a1b8fe5518ed9e724b4e4a2232a2cd82ebb31779dda1f72ddf27e5dd4d7"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.225240 4869 generic.go:334] "Generic (PLEG): container finished" podID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerID="469c241b1c8ffafef148bc223299795896cbaaaa31470e02b39c5b54ecb94a44" exitCode=0 Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.225295 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7nxw" event={"ID":"d28fe085-7a0e-4de6-8579-88e9583b87a3","Type":"ContainerDied","Data":"469c241b1c8ffafef148bc223299795896cbaaaa31470e02b39c5b54ecb94a44"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.225318 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7nxw" event={"ID":"d28fe085-7a0e-4de6-8579-88e9583b87a3","Type":"ContainerStarted","Data":"e995cf49722965579c19fef06427505d500e5691e2d71c8ab6272a645509515e"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.236545 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvwzr" event={"ID":"0229a79c-920d-482f-ab9f-92b042caee36","Type":"ContainerStarted","Data":"11e7009314ac18bc2201cf32d19673fef90d50c52af2779f430032d766895ce5"} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.245227 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:40 crc kubenswrapper[4869]: E0130 10:56:40.245785 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 10:56:40.745767946 +0000 UTC m=+151.295644012 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dznqv" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.302064 4869 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-30T10:56:39.967509443Z","Handler":null,"Name":""} Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.320639 4869 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.320674 4869 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.329396 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.347675 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.360441 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.449362 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.454312 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
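
The entries above capture the kubelet's volume reconciler racing the CSI plugin watcher: every MountVolume.MountDevice and UnmountVolume.TearDown attempt against pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 fails with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" and is requeued with a 500ms durationBeforeRetry, until the plugin socket under /var/lib/kubelet/plugins_registry/ is picked up at 10:56:39.967 and the driver is validated and registered at 10:56:40.320, after which the next retry of the same operations succeeds (see the entries that follow). Below is a minimal Go sketch of that retry-until-registered pattern; it is illustrative only, not kubelet source — the registry type, the timings, and the fixed backoff are invented for the example.

// Illustrative sketch (assumed names, not kubelet code): mount attempts
// fail fast while a CSI driver is unregistered and are retried no sooner
// than a backoff interval, succeeding once the plugin watcher registers
// the driver.
package main

import (
	"fmt"
	"sync"
	"time"
)

// registry stands in for the kubelet's in-memory CSI driver list, which
// the plugin watcher populates when it sees the driver's -reg.sock file.
type registry struct {
	mu      sync.RWMutex
	drivers map[string]bool
}

func (r *registry) register(name string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = true
}

func (r *registry) lookup(name string) error {
	r.mu.RLock()
	defer r.mu.RUnlock()
	if !r.drivers[name] {
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return nil
}

func main() {
	reg := &registry{drivers: map[string]bool{}}
	const driver = "kubevirt.io.hostpath-provisioner"
	const backoff = 500 * time.Millisecond // the durationBeforeRetry seen in the log

	// Simulate the plugin watcher registering the driver a moment after
	// the first mount attempts, as happens at 10:56:40.320 above.
	go func() {
		time.Sleep(1200 * time.Millisecond)
		reg.register(driver)
	}()

	// Retry loop: a failed attempt schedules the next one no earlier than
	// now+backoff, mirroring the nestedpendingoperations entries above.
	for attempt := 1; ; attempt++ {
		if err := reg.lookup(driver); err != nil {
			fmt.Printf("attempt %d: MountDevice failed: %v; retry in %v\n", attempt, err, backoff)
			time.Sleep(backoff)
			continue
		}
		fmt.Printf("attempt %d: MountDevice succeeded\n", attempt)
		break
	}
}

In the real kubelet the per-operation backoff is not fixed: it starts at the 500ms shown here and grows exponentially up to a cap (on the order of two minutes), which is why longer logs show the retry interval widening when a driver stays unregistered.
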
Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.454356 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.488309 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dznqv\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") " pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.616342 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:40 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:40 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:40 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.616766 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.701947 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ht8ck"] Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.703303 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.705485 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.718566 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.719362 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.721477 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.722157 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.722670 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht8ck"] Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.745672 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.749964 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.853103 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.853149 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-utilities\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.853187 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-catalog-content\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.853213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5l6c\" (UniqueName: \"kubernetes.io/projected/2db1788d-f11c-49f6-b613-d3ed750c8d8a-kube-api-access-l5l6c\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.853284 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.955621 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.955673 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-utilities\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.955782 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.955820 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-catalog-content\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.956148 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5l6c\" (UniqueName: \"kubernetes.io/projected/2db1788d-f11c-49f6-b613-d3ed750c8d8a-kube-api-access-l5l6c\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.956247 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.956379 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-catalog-content\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.956484 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-utilities\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.980551 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5l6c\" (UniqueName: \"kubernetes.io/projected/2db1788d-f11c-49f6-b613-d3ed750c8d8a-kube-api-access-l5l6c\") pod \"redhat-marketplace-ht8ck\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") " pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:40 crc kubenswrapper[4869]: I0130 10:56:40.993198 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.020300 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.039583 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dznqv"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.039934 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.104872 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8g8t8"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.110034 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.123978 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g8t8"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.158088 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-catalog-content\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.158150 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7pvg\" (UniqueName: \"kubernetes.io/projected/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-kube-api-access-x7pvg\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.158176 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-utilities\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.259694 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-catalog-content\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.259763 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7pvg\" (UniqueName: \"kubernetes.io/projected/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-kube-api-access-x7pvg\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.259792 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-utilities\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.260501 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-utilities\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.262386 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-catalog-content\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.273351 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" event={"ID":"b66a8fd2-73df-48dd-b697-95b2c50e01cd","Type":"ContainerStarted","Data":"4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79"} Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.273423 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" event={"ID":"b66a8fd2-73df-48dd-b697-95b2c50e01cd","Type":"ContainerStarted","Data":"661fdaec0df62b3577c281399e0723d8c793af3f9f0389d0728e271a612af86f"} Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.273491 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.288090 4869 generic.go:334] "Generic (PLEG): container finished" podID="0229a79c-920d-482f-ab9f-92b042caee36" containerID="5c67dbc7ed126ef5c2340b24fd76e2ae25921bed07f788f86eb51c990e311991" exitCode=0 Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.288376 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvwzr" event={"ID":"0229a79c-920d-482f-ab9f-92b042caee36","Type":"ContainerDied","Data":"5c67dbc7ed126ef5c2340b24fd76e2ae25921bed07f788f86eb51c990e311991"} Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.289750 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-g46p9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.289776 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-g46p9" podUID="054a5a6b-1556-42a3-a4bb-1c25470226cc" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.291465 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.293578 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-g46p9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.293660 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g46p9" podUID="054a5a6b-1556-42a3-a4bb-1c25470226cc" containerName="download-server" probeResult="failure" 
output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.299517 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-tnth8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.309119 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" podStartSLOduration=131.309100975 podStartE2EDuration="2m11.309100975s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:41.303132001 +0000 UTC m=+151.853008057" watchObservedRunningTime="2026-01-30 10:56:41.309100975 +0000 UTC m=+151.858977041" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.310286 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"570acf30-50dd-4eb1-80ad-6d7d18af91ee","Type":"ContainerStarted","Data":"5fb0e70f506b6a988a0356513e70be9ec4728a3fecf83113510e97ec4deef3e9"} Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.310319 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"570acf30-50dd-4eb1-80ad-6d7d18af91ee","Type":"ContainerStarted","Data":"baaf0b0e1ddc17fe379bb016d890038dabf5a5ae9482197e879fce3dc15b19a8"} Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.317835 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7pvg\" (UniqueName: \"kubernetes.io/projected/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-kube-api-access-x7pvg\") pod \"redhat-marketplace-8g8t8\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.413381 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.413361663 podStartE2EDuration="2.413361663s" podCreationTimestamp="2026-01-30 10:56:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:41.400517008 +0000 UTC m=+151.950393074" watchObservedRunningTime="2026-01-30 10:56:41.413361663 +0000 UTC m=+151.963237719" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.447250 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.505661 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dhxch"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.514911 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.518529 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.525453 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dhxch"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.542472 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.543518 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.564593 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht8ck"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.568675 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-catalog-content\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.568773 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jzsl\" (UniqueName: \"kubernetes.io/projected/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-kube-api-access-7jzsl\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.568799 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-utilities\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.569345 4869 patch_prober.go:28] interesting pod/console-f9d7485db-z8qjp container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.569377 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-z8qjp" podUID="0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.615906 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.670320 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-catalog-content\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.670400 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jzsl\" (UniqueName: \"kubernetes.io/projected/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-kube-api-access-7jzsl\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.670445 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-utilities\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.671018 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-utilities\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.672067 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-catalog-content\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.688113 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:41 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:41 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:41 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.688169 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.709879 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v67cm"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.711339 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.728049 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jzsl\" (UniqueName: \"kubernetes.io/projected/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-kube-api-access-7jzsl\") pod \"redhat-operators-dhxch\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") " pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.737153 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.741783 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v67cm"] Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.771261 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-utilities\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.771293 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7ssh\" (UniqueName: \"kubernetes.io/projected/71baa75f-e5ac-48ac-a224-e943ec26090c-kube-api-access-t7ssh\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.771313 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-catalog-content\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.872483 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7ssh\" (UniqueName: \"kubernetes.io/projected/71baa75f-e5ac-48ac-a224-e943ec26090c-kube-api-access-t7ssh\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.872543 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-catalog-content\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.872659 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-utilities\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.876443 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-catalog-content\") pod \"redhat-operators-v67cm\" (UID: 
\"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.879302 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-utilities\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.899798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7ssh\" (UniqueName: \"kubernetes.io/projected/71baa75f-e5ac-48ac-a224-e943ec26090c-kube-api-access-t7ssh\") pod \"redhat-operators-v67cm\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:41 crc kubenswrapper[4869]: I0130 10:56:41.995212 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.043646 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.141777 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.319956 4869 generic.go:334] "Generic (PLEG): container finished" podID="570acf30-50dd-4eb1-80ad-6d7d18af91ee" containerID="5fb0e70f506b6a988a0356513e70be9ec4728a3fecf83113510e97ec4deef3e9" exitCode=0 Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.320031 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"570acf30-50dd-4eb1-80ad-6d7d18af91ee","Type":"ContainerDied","Data":"5fb0e70f506b6a988a0356513e70be9ec4728a3fecf83113510e97ec4deef3e9"} Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.321563 4869 generic.go:334] "Generic (PLEG): container finished" podID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerID="93cbae64c4b7aeb176d1c21ab71f0706da038fea57405f151ffb4c7379675e81" exitCode=0 Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.321620 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht8ck" event={"ID":"2db1788d-f11c-49f6-b613-d3ed750c8d8a","Type":"ContainerDied","Data":"93cbae64c4b7aeb176d1c21ab71f0706da038fea57405f151ffb4c7379675e81"} Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.321649 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht8ck" event={"ID":"2db1788d-f11c-49f6-b613-d3ed750c8d8a","Type":"ContainerStarted","Data":"aed7914cbd64b379d96dd6f6b6396d1bd9b26794ddfdb939a94d36ef14c2f1f2"} Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.324122 4869 generic.go:334] "Generic (PLEG): container finished" podID="624037a4-840b-4c6d-806b-6b0d2276328d" containerID="79907ccc904a8e91f6ae91d0c6026a416f3e793ffd96165294ae8f767ea96913" exitCode=0 Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.324411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" 
event={"ID":"624037a4-840b-4c6d-806b-6b0d2276328d","Type":"ContainerDied","Data":"79907ccc904a8e91f6ae91d0c6026a416f3e793ffd96165294ae8f767ea96913"} Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.383476 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g8t8"] Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.418336 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.612047 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:42 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:42 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:42 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.612112 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.831752 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v67cm"] Jan 30 10:56:42 crc kubenswrapper[4869]: I0130 10:56:42.848652 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dhxch"] Jan 30 10:56:42 crc kubenswrapper[4869]: W0130 10:56:42.864036 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71baa75f_e5ac_48ac_a224_e943ec26090c.slice/crio-062f8792c8fc7027d054f6f006a1de81b9b955acb06b0d2d769c149dc348fa12 WatchSource:0}: Error finding container 062f8792c8fc7027d054f6f006a1de81b9b955acb06b0d2d769c149dc348fa12: Status 404 returned error can't find the container with id 062f8792c8fc7027d054f6f006a1de81b9b955acb06b0d2d769c149dc348fa12 Jan 30 10:56:42 crc kubenswrapper[4869]: W0130 10:56:42.876745 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod112aad1f_e2f9_41e8_a9c8_1d3b3297528e.slice/crio-ba5a68e1b760c91efdf68d52c685417ab35111c5aff5860f36a75614461f0325 WatchSource:0}: Error finding container ba5a68e1b760c91efdf68d52c685417ab35111c5aff5860f36a75614461f0325: Status 404 returned error can't find the container with id ba5a68e1b760c91efdf68d52c685417ab35111c5aff5860f36a75614461f0325 Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.329998 4869 generic.go:334] "Generic (PLEG): container finished" podID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerID="ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c" exitCode=0 Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.330299 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v67cm" event={"ID":"71baa75f-e5ac-48ac-a224-e943ec26090c","Type":"ContainerDied","Data":"ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.330324 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v67cm" 
event={"ID":"71baa75f-e5ac-48ac-a224-e943ec26090c","Type":"ContainerStarted","Data":"062f8792c8fc7027d054f6f006a1de81b9b955acb06b0d2d769c149dc348fa12"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.359195 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"820371c0-12a8-4832-9d2a-c49ecf8b6cc7","Type":"ContainerStarted","Data":"a61e2b9fd2166acd5dd5f29f863a19c1858c29cca725d48727df46b9662f7d06"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.359249 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"820371c0-12a8-4832-9d2a-c49ecf8b6cc7","Type":"ContainerStarted","Data":"b8077fa7aaf51326c84005a9db1f2915f41fb3df992de6687c554e701e41b52d"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.362336 4869 generic.go:334] "Generic (PLEG): container finished" podID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerID="c147e1e51f0cdaab4ef3e12e6527eb931e3731509b3c05660a78296758d73dfa" exitCode=0 Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.362420 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g8t8" event={"ID":"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5","Type":"ContainerDied","Data":"c147e1e51f0cdaab4ef3e12e6527eb931e3731509b3c05660a78296758d73dfa"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.362464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g8t8" event={"ID":"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5","Type":"ContainerStarted","Data":"912bd8fd2986d2ed7eec1f2d63a4b3664ec375cb098b94d6742f4f37ca68f3ca"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.368068 4869 generic.go:334] "Generic (PLEG): container finished" podID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerID="c929e29bb2baec575e0d7b0ca2e44c8a5bc506141cc185b6d817b8f6fd97b1d8" exitCode=0 Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.368804 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhxch" event={"ID":"112aad1f-e2f9-41e8-a9c8-1d3b3297528e","Type":"ContainerDied","Data":"c929e29bb2baec575e0d7b0ca2e44c8a5bc506141cc185b6d817b8f6fd97b1d8"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.368856 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhxch" event={"ID":"112aad1f-e2f9-41e8-a9c8-1d3b3297528e","Type":"ContainerStarted","Data":"ba5a68e1b760c91efdf68d52c685417ab35111c5aff5860f36a75614461f0325"} Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.377202 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.37718253 podStartE2EDuration="3.37718253s" podCreationTimestamp="2026-01-30 10:56:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:56:43.373417064 +0000 UTC m=+153.923293150" watchObservedRunningTime="2026-01-30 10:56:43.37718253 +0000 UTC m=+153.927058596" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.521773 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-djqwl" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.613913 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure 
output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:43 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:43 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:43 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.613979 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.684750 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.698099 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsc5j\" (UniqueName: \"kubernetes.io/projected/624037a4-840b-4c6d-806b-6b0d2276328d-kube-api-access-nsc5j\") pod \"624037a4-840b-4c6d-806b-6b0d2276328d\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.698192 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/624037a4-840b-4c6d-806b-6b0d2276328d-config-volume\") pod \"624037a4-840b-4c6d-806b-6b0d2276328d\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.698247 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/624037a4-840b-4c6d-806b-6b0d2276328d-secret-volume\") pod \"624037a4-840b-4c6d-806b-6b0d2276328d\" (UID: \"624037a4-840b-4c6d-806b-6b0d2276328d\") " Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.704017 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/624037a4-840b-4c6d-806b-6b0d2276328d-config-volume" (OuterVolumeSpecName: "config-volume") pod "624037a4-840b-4c6d-806b-6b0d2276328d" (UID: "624037a4-840b-4c6d-806b-6b0d2276328d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.708962 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/624037a4-840b-4c6d-806b-6b0d2276328d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "624037a4-840b-4c6d-806b-6b0d2276328d" (UID: "624037a4-840b-4c6d-806b-6b0d2276328d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.709828 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/624037a4-840b-4c6d-806b-6b0d2276328d-kube-api-access-nsc5j" (OuterVolumeSpecName: "kube-api-access-nsc5j") pod "624037a4-840b-4c6d-806b-6b0d2276328d" (UID: "624037a4-840b-4c6d-806b-6b0d2276328d"). InnerVolumeSpecName "kube-api-access-nsc5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.737021 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.801422 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kubelet-dir\") pod \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.801490 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kube-api-access\") pod \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\" (UID: \"570acf30-50dd-4eb1-80ad-6d7d18af91ee\") " Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.801787 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/624037a4-840b-4c6d-806b-6b0d2276328d-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.801803 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/624037a4-840b-4c6d-806b-6b0d2276328d-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.801813 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsc5j\" (UniqueName: \"kubernetes.io/projected/624037a4-840b-4c6d-806b-6b0d2276328d-kube-api-access-nsc5j\") on node \"crc\" DevicePath \"\"" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.802204 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "570acf30-50dd-4eb1-80ad-6d7d18af91ee" (UID: "570acf30-50dd-4eb1-80ad-6d7d18af91ee"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.808374 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "570acf30-50dd-4eb1-80ad-6d7d18af91ee" (UID: "570acf30-50dd-4eb1-80ad-6d7d18af91ee"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.913535 4869 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:56:43 crc kubenswrapper[4869]: I0130 10:56:43.913598 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/570acf30-50dd-4eb1-80ad-6d7d18af91ee-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.378286 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"570acf30-50dd-4eb1-80ad-6d7d18af91ee","Type":"ContainerDied","Data":"baaf0b0e1ddc17fe379bb016d890038dabf5a5ae9482197e879fce3dc15b19a8"} Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.378335 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="baaf0b0e1ddc17fe379bb016d890038dabf5a5ae9482197e879fce3dc15b19a8" Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.378407 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.381621 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" event={"ID":"624037a4-840b-4c6d-806b-6b0d2276328d","Type":"ContainerDied","Data":"9c2820286cad1246665075e974e26eaa0988da7cd35021af77a4a319bdc7d662"} Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.381659 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c2820286cad1246665075e974e26eaa0988da7cd35021af77a4a319bdc7d662" Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.381739 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7" Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.385744 4869 generic.go:334] "Generic (PLEG): container finished" podID="820371c0-12a8-4832-9d2a-c49ecf8b6cc7" containerID="a61e2b9fd2166acd5dd5f29f863a19c1858c29cca725d48727df46b9662f7d06" exitCode=0 Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.385802 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"820371c0-12a8-4832-9d2a-c49ecf8b6cc7","Type":"ContainerDied","Data":"a61e2b9fd2166acd5dd5f29f863a19c1858c29cca725d48727df46b9662f7d06"} Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.611900 4869 patch_prober.go:28] interesting pod/router-default-5444994796-dvjw7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 10:56:44 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Jan 30 10:56:44 crc kubenswrapper[4869]: [+]process-running ok Jan 30 10:56:44 crc kubenswrapper[4869]: healthz check failed Jan 30 10:56:44 crc kubenswrapper[4869]: I0130 10:56:44.618744 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dvjw7" podUID="083c9bb4-c4ed-4217-bff5-3babe25ac772" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 10:56:45 crc kubenswrapper[4869]: I0130 10:56:45.611965 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:45 crc kubenswrapper[4869]: I0130 10:56:45.614973 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-dvjw7" Jan 30 10:56:50 crc kubenswrapper[4869]: I0130 10:56:50.730007 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:50 crc kubenswrapper[4869]: I0130 10:56:50.748232 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kubelet-dir\") pod \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " Jan 30 10:56:50 crc kubenswrapper[4869]: I0130 10:56:50.748377 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kube-api-access\") pod \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\" (UID: \"820371c0-12a8-4832-9d2a-c49ecf8b6cc7\") " Jan 30 10:56:50 crc kubenswrapper[4869]: I0130 10:56:50.748475 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "820371c0-12a8-4832-9d2a-c49ecf8b6cc7" (UID: "820371c0-12a8-4832-9d2a-c49ecf8b6cc7"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:56:50 crc kubenswrapper[4869]: I0130 10:56:50.748781 4869 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:56:50 crc kubenswrapper[4869]: I0130 10:56:50.754559 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "820371c0-12a8-4832-9d2a-c49ecf8b6cc7" (UID: "820371c0-12a8-4832-9d2a-c49ecf8b6cc7"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:56:50 crc kubenswrapper[4869]: I0130 10:56:50.849880 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/820371c0-12a8-4832-9d2a-c49ecf8b6cc7-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.287072 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-g46p9" Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.444014 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"820371c0-12a8-4832-9d2a-c49ecf8b6cc7","Type":"ContainerDied","Data":"b8077fa7aaf51326c84005a9db1f2915f41fb3df992de6687c554e701e41b52d"} Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.444225 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8077fa7aaf51326c84005a9db1f2915f41fb3df992de6687c554e701e41b52d" Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.444298 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.522977 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.534921 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.769548 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 10:56:51 crc kubenswrapper[4869]: I0130 10:56:51.769601 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 10:56:52 crc kubenswrapper[4869]: I0130 10:56:52.772812 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:52 crc kubenswrapper[4869]: I0130 10:56:52.786582 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35533ad8-7435-413d-bad1-05a0ca183c0d-metrics-certs\") pod \"network-metrics-daemon-2krt6\" (UID: \"35533ad8-7435-413d-bad1-05a0ca183c0d\") " pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:56:52 crc kubenswrapper[4869]: I0130 10:56:52.945177 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2krt6" Jan 30 10:57:00 crc kubenswrapper[4869]: I0130 10:57:00.767391 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" Jan 30 10:57:06 crc kubenswrapper[4869]: E0130 10:57:06.524828 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 30 10:57:06 crc kubenswrapper[4869]: E0130 10:57:06.525504 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4zcwq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gg5f8_openshift-marketplace(46b8829e-b1f9-4b12-bcbf-da01b6ef5840): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 10:57:06 crc kubenswrapper[4869]: E0130 10:57:06.526751 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gg5f8" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" Jan 30 10:57:06 crc kubenswrapper[4869]: E0130 10:57:06.545791 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gg5f8" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" Jan 30 10:57:06 crc kubenswrapper[4869]: I0130 10:57:06.921856 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2krt6"] Jan 30 10:57:09 crc kubenswrapper[4869]: W0130 10:57:09.793106 4869 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35533ad8_7435_413d_bad1_05a0ca183c0d.slice/crio-911b9df70310be5ad60cb5f4f62b3f4d664350197adf086e5b2df452f1535636 WatchSource:0}: Error finding container 911b9df70310be5ad60cb5f4f62b3f4d664350197adf086e5b2df452f1535636: Status 404 returned error can't find the container with id 911b9df70310be5ad60cb5f4f62b3f4d664350197adf086e5b2df452f1535636 Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.563837 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhxch" event={"ID":"112aad1f-e2f9-41e8-a9c8-1d3b3297528e","Type":"ContainerStarted","Data":"ef927e52f370e13eac259fe167a45848c8199acc84fc1738f61757ab0452ff90"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.568576 4869 generic.go:334] "Generic (PLEG): container finished" podID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerID="1e043e47bd678f540dc9083695b9aecf10d9c21646a324f6c05d523977858810" exitCode=0 Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.568684 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g8t8" event={"ID":"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5","Type":"ContainerDied","Data":"1e043e47bd678f540dc9083695b9aecf10d9c21646a324f6c05d523977858810"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.572076 4869 generic.go:334] "Generic (PLEG): container finished" podID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerID="29777e8206342d3f8c8cbe8d0c0281461676020493eac04312adf9869b7a2f02" exitCode=0 Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.572149 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht8ck" event={"ID":"2db1788d-f11c-49f6-b613-d3ed750c8d8a","Type":"ContainerDied","Data":"29777e8206342d3f8c8cbe8d0c0281461676020493eac04312adf9869b7a2f02"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.575984 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v67cm" event={"ID":"71baa75f-e5ac-48ac-a224-e943ec26090c","Type":"ContainerStarted","Data":"c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.579292 4869 generic.go:334] "Generic (PLEG): container finished" podID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerID="a46762d485a43d8e6e1d3a3e3b044c6ec091159b57794397e9cf4b732608d007" exitCode=0 Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.579419 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gcxdk" event={"ID":"18c7bbb6-bdf6-4e26-9670-49a30b7dab22","Type":"ContainerDied","Data":"a46762d485a43d8e6e1d3a3e3b044c6ec091159b57794397e9cf4b732608d007"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.581073 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2krt6" event={"ID":"35533ad8-7435-413d-bad1-05a0ca183c0d","Type":"ContainerStarted","Data":"77b39bc9b7feae9e8ff671af77e6adfa03e168c1651f0d13dcd9b7a791817139"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.581099 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2krt6" event={"ID":"35533ad8-7435-413d-bad1-05a0ca183c0d","Type":"ContainerStarted","Data":"911b9df70310be5ad60cb5f4f62b3f4d664350197adf086e5b2df452f1535636"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.584787 4869 generic.go:334] 
"Generic (PLEG): container finished" podID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerID="3d4666107dbbf62a5148e0fa95079ed0e256fbd42d4e61dc7cf3fd3413b8a50d" exitCode=0 Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.584848 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7nxw" event={"ID":"d28fe085-7a0e-4de6-8579-88e9583b87a3","Type":"ContainerDied","Data":"3d4666107dbbf62a5148e0fa95079ed0e256fbd42d4e61dc7cf3fd3413b8a50d"} Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.592692 4869 generic.go:334] "Generic (PLEG): container finished" podID="0229a79c-920d-482f-ab9f-92b042caee36" containerID="66a2d44501a8c9c0f6d659c4a4ba70fcefcd216d897f9635cf2a5b4e7a6021e5" exitCode=0 Jan 30 10:57:10 crc kubenswrapper[4869]: I0130 10:57:10.592779 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvwzr" event={"ID":"0229a79c-920d-482f-ab9f-92b042caee36","Type":"ContainerDied","Data":"66a2d44501a8c9c0f6d659c4a4ba70fcefcd216d897f9635cf2a5b4e7a6021e5"} Jan 30 10:57:11 crc kubenswrapper[4869]: I0130 10:57:11.599172 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2krt6" event={"ID":"35533ad8-7435-413d-bad1-05a0ca183c0d","Type":"ContainerStarted","Data":"bab1db29920d1685bd25313801860225e19c6954ea87495a92f5265f6a99164c"} Jan 30 10:57:11 crc kubenswrapper[4869]: I0130 10:57:11.754301 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" Jan 30 10:57:12 crc kubenswrapper[4869]: I0130 10:57:12.608265 4869 generic.go:334] "Generic (PLEG): container finished" podID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerID="ef927e52f370e13eac259fe167a45848c8199acc84fc1738f61757ab0452ff90" exitCode=0 Jan 30 10:57:12 crc kubenswrapper[4869]: I0130 10:57:12.608348 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhxch" event={"ID":"112aad1f-e2f9-41e8-a9c8-1d3b3297528e","Type":"ContainerDied","Data":"ef927e52f370e13eac259fe167a45848c8199acc84fc1738f61757ab0452ff90"} Jan 30 10:57:12 crc kubenswrapper[4869]: I0130 10:57:12.612366 4869 generic.go:334] "Generic (PLEG): container finished" podID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerID="c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38" exitCode=0 Jan 30 10:57:12 crc kubenswrapper[4869]: I0130 10:57:12.612418 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v67cm" event={"ID":"71baa75f-e5ac-48ac-a224-e943ec26090c","Type":"ContainerDied","Data":"c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38"} Jan 30 10:57:12 crc kubenswrapper[4869]: I0130 10:57:12.650089 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-2krt6" podStartSLOduration=162.650069453 podStartE2EDuration="2m42.650069453s" podCreationTimestamp="2026-01-30 10:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:57:12.648396371 +0000 UTC m=+183.198272447" watchObservedRunningTime="2026-01-30 10:57:12.650069453 +0000 UTC m=+183.199945519" Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.623739 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v67cm" 
event={"ID":"71baa75f-e5ac-48ac-a224-e943ec26090c","Type":"ContainerStarted","Data":"7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1"} Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.633863 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gcxdk" event={"ID":"18c7bbb6-bdf6-4e26-9670-49a30b7dab22","Type":"ContainerStarted","Data":"2fe077b01ea0475b048c92fcbda9b7f4612e727195f1f158659f467d3e7581f9"} Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.639791 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7nxw" event={"ID":"d28fe085-7a0e-4de6-8579-88e9583b87a3","Type":"ContainerStarted","Data":"e14637d0ad031188eb163b753afb5ef579d9edccc84ee0bef98e945349d5608d"} Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.642503 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g8t8" event={"ID":"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5","Type":"ContainerStarted","Data":"a3d497ec914f7b2ecfbec197d2d1438d1095ffdb2995b7755dd8cd185138aae5"} Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.648427 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v67cm" podStartSLOduration=2.558029274 podStartE2EDuration="32.648405712s" podCreationTimestamp="2026-01-30 10:56:41 +0000 UTC" firstStartedPulling="2026-01-30 10:56:43.332179545 +0000 UTC m=+153.882055611" lastFinishedPulling="2026-01-30 10:57:13.422555993 +0000 UTC m=+183.972432049" observedRunningTime="2026-01-30 10:57:13.643931754 +0000 UTC m=+184.193807820" watchObservedRunningTime="2026-01-30 10:57:13.648405712 +0000 UTC m=+184.198281778" Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.651373 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht8ck" event={"ID":"2db1788d-f11c-49f6-b613-d3ed750c8d8a","Type":"ContainerStarted","Data":"28108176efa84fbe3b6f2c8d41b3d9bba63e02a9a1602e373c260a4134f00b22"} Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.670922 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gcxdk" podStartSLOduration=3.375763226 podStartE2EDuration="35.670901024s" podCreationTimestamp="2026-01-30 10:56:38 +0000 UTC" firstStartedPulling="2026-01-30 10:56:40.200796342 +0000 UTC m=+150.750672408" lastFinishedPulling="2026-01-30 10:57:12.49593415 +0000 UTC m=+183.045810206" observedRunningTime="2026-01-30 10:57:13.668105748 +0000 UTC m=+184.217981834" watchObservedRunningTime="2026-01-30 10:57:13.670901024 +0000 UTC m=+184.220777090" Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.685486 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m7nxw" podStartSLOduration=2.622165108 podStartE2EDuration="35.685472003s" podCreationTimestamp="2026-01-30 10:56:38 +0000 UTC" firstStartedPulling="2026-01-30 10:56:40.227155993 +0000 UTC m=+150.777032059" lastFinishedPulling="2026-01-30 10:57:13.290462888 +0000 UTC m=+183.840338954" observedRunningTime="2026-01-30 10:57:13.682599954 +0000 UTC m=+184.232476020" watchObservedRunningTime="2026-01-30 10:57:13.685472003 +0000 UTC m=+184.235348069" Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.706695 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8g8t8" 
podStartSLOduration=2.85652726 podStartE2EDuration="32.706674765s" podCreationTimestamp="2026-01-30 10:56:41 +0000 UTC" firstStartedPulling="2026-01-30 10:56:43.364213061 +0000 UTC m=+153.914089127" lastFinishedPulling="2026-01-30 10:57:13.214360566 +0000 UTC m=+183.764236632" observedRunningTime="2026-01-30 10:57:13.704510469 +0000 UTC m=+184.254386545" watchObservedRunningTime="2026-01-30 10:57:13.706674765 +0000 UTC m=+184.256550841" Jan 30 10:57:13 crc kubenswrapper[4869]: I0130 10:57:13.722508 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ht8ck" podStartSLOduration=2.846546683 podStartE2EDuration="33.722489072s" podCreationTimestamp="2026-01-30 10:56:40 +0000 UTC" firstStartedPulling="2026-01-30 10:56:42.323407586 +0000 UTC m=+152.873283652" lastFinishedPulling="2026-01-30 10:57:13.199349975 +0000 UTC m=+183.749226041" observedRunningTime="2026-01-30 10:57:13.721478011 +0000 UTC m=+184.271354077" watchObservedRunningTime="2026-01-30 10:57:13.722489072 +0000 UTC m=+184.272365138" Jan 30 10:57:14 crc kubenswrapper[4869]: I0130 10:57:14.661347 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvwzr" event={"ID":"0229a79c-920d-482f-ab9f-92b042caee36","Type":"ContainerStarted","Data":"6d4c647a80f9b942ba2111a7a51a76410dd89ae46b0b49f8c0afaba0503df8ce"} Jan 30 10:57:14 crc kubenswrapper[4869]: I0130 10:57:14.666171 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhxch" event={"ID":"112aad1f-e2f9-41e8-a9c8-1d3b3297528e","Type":"ContainerStarted","Data":"31176bb44bb5d56bedb53229458b2ecc95c29c22b00f6b78a0b960d5dcc41f79"} Jan 30 10:57:14 crc kubenswrapper[4869]: I0130 10:57:14.679922 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mvwzr" podStartSLOduration=4.482514922 podStartE2EDuration="36.679908112s" podCreationTimestamp="2026-01-30 10:56:38 +0000 UTC" firstStartedPulling="2026-01-30 10:56:41.292489804 +0000 UTC m=+151.842365870" lastFinishedPulling="2026-01-30 10:57:13.489882994 +0000 UTC m=+184.039759060" observedRunningTime="2026-01-30 10:57:14.677167258 +0000 UTC m=+185.227043334" watchObservedRunningTime="2026-01-30 10:57:14.679908112 +0000 UTC m=+185.229784178" Jan 30 10:57:14 crc kubenswrapper[4869]: I0130 10:57:14.703578 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dhxch" podStartSLOduration=3.477181266 podStartE2EDuration="33.703549759s" podCreationTimestamp="2026-01-30 10:56:41 +0000 UTC" firstStartedPulling="2026-01-30 10:56:43.37167756 +0000 UTC m=+153.921553626" lastFinishedPulling="2026-01-30 10:57:13.598046053 +0000 UTC m=+184.147922119" observedRunningTime="2026-01-30 10:57:14.703388265 +0000 UTC m=+185.253264321" watchObservedRunningTime="2026-01-30 10:57:14.703549759 +0000 UTC m=+185.253425816" Jan 30 10:57:18 crc kubenswrapper[4869]: I0130 10:57:18.175998 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 10:57:18 crc kubenswrapper[4869]: I0130 10:57:18.652756 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:57:18 crc kubenswrapper[4869]: I0130 10:57:18.652832 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:57:18 crc kubenswrapper[4869]: I0130 10:57:18.696137 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gg5f8" event={"ID":"46b8829e-b1f9-4b12-bcbf-da01b6ef5840","Type":"ContainerStarted","Data":"5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8"} Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.054403 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.106161 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 10:57:19 crc kubenswrapper[4869]: E0130 10:57:19.106674 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820371c0-12a8-4832-9d2a-c49ecf8b6cc7" containerName="pruner" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.106687 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="820371c0-12a8-4832-9d2a-c49ecf8b6cc7" containerName="pruner" Jan 30 10:57:19 crc kubenswrapper[4869]: E0130 10:57:19.106701 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="570acf30-50dd-4eb1-80ad-6d7d18af91ee" containerName="pruner" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.106734 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="570acf30-50dd-4eb1-80ad-6d7d18af91ee" containerName="pruner" Jan 30 10:57:19 crc kubenswrapper[4869]: E0130 10:57:19.106751 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="624037a4-840b-4c6d-806b-6b0d2276328d" containerName="collect-profiles" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.106761 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="624037a4-840b-4c6d-806b-6b0d2276328d" containerName="collect-profiles" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.106876 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="570acf30-50dd-4eb1-80ad-6d7d18af91ee" containerName="pruner" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.106891 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="820371c0-12a8-4832-9d2a-c49ecf8b6cc7" containerName="pruner" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.106898 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="624037a4-840b-4c6d-806b-6b0d2276328d" containerName="collect-profiles" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.107272 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.109438 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.109563 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.115091 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.120944 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.144304 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e483b08b-bf0a-4fff-9956-4b727b09edfd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.144589 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e483b08b-bf0a-4fff-9956-4b727b09edfd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.168966 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.169030 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.208165 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.246066 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e483b08b-bf0a-4fff-9956-4b727b09edfd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.246116 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e483b08b-bf0a-4fff-9956-4b727b09edfd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.246200 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e483b08b-bf0a-4fff-9956-4b727b09edfd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.261228 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:57:19 
crc kubenswrapper[4869]: I0130 10:57:19.261534 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.267317 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e483b08b-bf0a-4fff-9956-4b727b09edfd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.304450 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.421671 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.706952 4869 generic.go:334] "Generic (PLEG): container finished" podID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerID="5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8" exitCode=0 Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.708201 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gg5f8" event={"ID":"46b8829e-b1f9-4b12-bcbf-da01b6ef5840","Type":"ContainerDied","Data":"5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8"} Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.781440 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.788696 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:57:19 crc kubenswrapper[4869]: I0130 10:57:19.938385 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 10:57:19 crc kubenswrapper[4869]: W0130 10:57:19.944209 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode483b08b_bf0a_4fff_9956_4b727b09edfd.slice/crio-f58899f71abc99b20ebd485053ca93a789350b961ed0b6bef734cbacfc850c28 WatchSource:0}: Error finding container f58899f71abc99b20ebd485053ca93a789350b961ed0b6bef734cbacfc850c28: Status 404 returned error can't find the container with id f58899f71abc99b20ebd485053ca93a789350b961ed0b6bef734cbacfc850c28 Jan 30 10:57:20 crc kubenswrapper[4869]: I0130 10:57:20.057881 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dqfj8"] Jan 30 10:57:20 crc kubenswrapper[4869]: I0130 10:57:20.717307 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e483b08b-bf0a-4fff-9956-4b727b09edfd","Type":"ContainerStarted","Data":"1a4f20adf579faec7615af9046b986d1fb4206cfb135e660618a558afbf47366"} Jan 30 10:57:20 crc kubenswrapper[4869]: I0130 10:57:20.717585 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e483b08b-bf0a-4fff-9956-4b727b09edfd","Type":"ContainerStarted","Data":"f58899f71abc99b20ebd485053ca93a789350b961ed0b6bef734cbacfc850c28"} Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.022656 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.023894 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.075408 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.448268 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.448577 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.551411 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.726882 4869 generic.go:334] "Generic (PLEG): container finished" podID="e483b08b-bf0a-4fff-9956-4b727b09edfd" containerID="1a4f20adf579faec7615af9046b986d1fb4206cfb135e660618a558afbf47366" exitCode=0 Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.726985 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e483b08b-bf0a-4fff-9956-4b727b09edfd","Type":"ContainerDied","Data":"1a4f20adf579faec7615af9046b986d1fb4206cfb135e660618a558afbf47366"} Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.769519 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.769582 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.788908 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.792569 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.996363 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:57:21 crc kubenswrapper[4869]: I0130 10:57:21.996715 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.042298 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.044284 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.044311 4869 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.088400 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.794400 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.798720 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.918394 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mvwzr"] Jan 30 10:57:22 crc kubenswrapper[4869]: I0130 10:57:22.918662 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mvwzr" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="registry-server" containerID="cri-o://6d4c647a80f9b942ba2111a7a51a76410dd89ae46b0b49f8c0afaba0503df8ce" gracePeriod=2 Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.527055 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.700283 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e483b08b-bf0a-4fff-9956-4b727b09edfd-kubelet-dir\") pod \"e483b08b-bf0a-4fff-9956-4b727b09edfd\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.700378 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e483b08b-bf0a-4fff-9956-4b727b09edfd-kube-api-access\") pod \"e483b08b-bf0a-4fff-9956-4b727b09edfd\" (UID: \"e483b08b-bf0a-4fff-9956-4b727b09edfd\") " Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.700433 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e483b08b-bf0a-4fff-9956-4b727b09edfd-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e483b08b-bf0a-4fff-9956-4b727b09edfd" (UID: "e483b08b-bf0a-4fff-9956-4b727b09edfd"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.700763 4869 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e483b08b-bf0a-4fff-9956-4b727b09edfd-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.707162 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e483b08b-bf0a-4fff-9956-4b727b09edfd-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e483b08b-bf0a-4fff-9956-4b727b09edfd" (UID: "e483b08b-bf0a-4fff-9956-4b727b09edfd"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.741216 4869 generic.go:334] "Generic (PLEG): container finished" podID="0229a79c-920d-482f-ab9f-92b042caee36" containerID="6d4c647a80f9b942ba2111a7a51a76410dd89ae46b0b49f8c0afaba0503df8ce" exitCode=0 Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.741257 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvwzr" event={"ID":"0229a79c-920d-482f-ab9f-92b042caee36","Type":"ContainerDied","Data":"6d4c647a80f9b942ba2111a7a51a76410dd89ae46b0b49f8c0afaba0503df8ce"} Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.742591 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e483b08b-bf0a-4fff-9956-4b727b09edfd","Type":"ContainerDied","Data":"f58899f71abc99b20ebd485053ca93a789350b961ed0b6bef734cbacfc850c28"} Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.742638 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f58899f71abc99b20ebd485053ca93a789350b961ed0b6bef734cbacfc850c28" Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.742641 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 10:57:23 crc kubenswrapper[4869]: I0130 10:57:23.801349 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e483b08b-bf0a-4fff-9956-4b727b09edfd-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.468505 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.610516 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-utilities\") pod \"0229a79c-920d-482f-ab9f-92b042caee36\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.610609 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-catalog-content\") pod \"0229a79c-920d-482f-ab9f-92b042caee36\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.610682 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkx4w\" (UniqueName: \"kubernetes.io/projected/0229a79c-920d-482f-ab9f-92b042caee36-kube-api-access-wkx4w\") pod \"0229a79c-920d-482f-ab9f-92b042caee36\" (UID: \"0229a79c-920d-482f-ab9f-92b042caee36\") " Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.611294 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-utilities" (OuterVolumeSpecName: "utilities") pod "0229a79c-920d-482f-ab9f-92b042caee36" (UID: "0229a79c-920d-482f-ab9f-92b042caee36"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.614328 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0229a79c-920d-482f-ab9f-92b042caee36-kube-api-access-wkx4w" (OuterVolumeSpecName: "kube-api-access-wkx4w") pod "0229a79c-920d-482f-ab9f-92b042caee36" (UID: "0229a79c-920d-482f-ab9f-92b042caee36"). InnerVolumeSpecName "kube-api-access-wkx4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.678586 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0229a79c-920d-482f-ab9f-92b042caee36" (UID: "0229a79c-920d-482f-ab9f-92b042caee36"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.711940 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.711983 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkx4w\" (UniqueName: \"kubernetes.io/projected/0229a79c-920d-482f-ab9f-92b042caee36-kube-api-access-wkx4w\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.711999 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0229a79c-920d-482f-ab9f-92b042caee36-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.749763 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvwzr" event={"ID":"0229a79c-920d-482f-ab9f-92b042caee36","Type":"ContainerDied","Data":"11e7009314ac18bc2201cf32d19673fef90d50c52af2779f430032d766895ce5"} Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.749783 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mvwzr" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.749838 4869 scope.go:117] "RemoveContainer" containerID="6d4c647a80f9b942ba2111a7a51a76410dd89ae46b0b49f8c0afaba0503df8ce" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.751949 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gg5f8" event={"ID":"46b8829e-b1f9-4b12-bcbf-da01b6ef5840","Type":"ContainerStarted","Data":"b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd"} Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.781215 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mvwzr"] Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.781469 4869 scope.go:117] "RemoveContainer" containerID="66a2d44501a8c9c0f6d659c4a4ba70fcefcd216d897f9635cf2a5b4e7a6021e5" Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.785003 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mvwzr"] Jan 30 10:57:24 crc kubenswrapper[4869]: I0130 10:57:24.796547 4869 scope.go:117] "RemoveContainer" containerID="5c67dbc7ed126ef5c2340b24fd76e2ae25921bed07f788f86eb51c990e311991" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.109170 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g8t8"] Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.109386 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8g8t8" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="registry-server" containerID="cri-o://a3d497ec914f7b2ecfbec197d2d1438d1095ffdb2995b7755dd8cd185138aae5" gracePeriod=2 Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.311103 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v67cm"] Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.311310 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v67cm" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="registry-server" containerID="cri-o://7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1" gracePeriod=2 Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.761858 4869 generic.go:334] "Generic (PLEG): container finished" podID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerID="a3d497ec914f7b2ecfbec197d2d1438d1095ffdb2995b7755dd8cd185138aae5" exitCode=0 Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.761933 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g8t8" event={"ID":"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5","Type":"ContainerDied","Data":"a3d497ec914f7b2ecfbec197d2d1438d1095ffdb2995b7755dd8cd185138aae5"} Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.777913 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gg5f8" podStartSLOduration=3.897879282 podStartE2EDuration="47.777894539s" podCreationTimestamp="2026-01-30 10:56:38 +0000 UTC" firstStartedPulling="2026-01-30 10:56:40.206962282 +0000 UTC m=+150.756838348" lastFinishedPulling="2026-01-30 10:57:24.086977539 +0000 UTC m=+194.636853605" observedRunningTime="2026-01-30 10:57:25.776918929 +0000 UTC m=+196.326795005" watchObservedRunningTime="2026-01-30 10:57:25.777894539 +0000 
UTC m=+196.327770625" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.905740 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 10:57:25 crc kubenswrapper[4869]: E0130 10:57:25.905973 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="extract-content" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.905985 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="extract-content" Jan 30 10:57:25 crc kubenswrapper[4869]: E0130 10:57:25.906001 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="extract-utilities" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.906008 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="extract-utilities" Jan 30 10:57:25 crc kubenswrapper[4869]: E0130 10:57:25.906018 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="registry-server" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.906024 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="registry-server" Jan 30 10:57:25 crc kubenswrapper[4869]: E0130 10:57:25.906035 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e483b08b-bf0a-4fff-9956-4b727b09edfd" containerName="pruner" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.906041 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e483b08b-bf0a-4fff-9956-4b727b09edfd" containerName="pruner" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.906142 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e483b08b-bf0a-4fff-9956-4b727b09edfd" containerName="pruner" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.906156 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0229a79c-920d-482f-ab9f-92b042caee36" containerName="registry-server" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.906489 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.909068 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.909620 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 30 10:57:25 crc kubenswrapper[4869]: I0130 10:57:25.912769 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.039439 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.039837 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-var-lock\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.039905 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cc76d4c-23ec-4747-8441-727344a3ccff-kube-api-access\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.064738 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.140124 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7pvg\" (UniqueName: \"kubernetes.io/projected/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-kube-api-access-x7pvg\") pod \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.141975 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-catalog-content\") pod \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.141475 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0229a79c-920d-482f-ab9f-92b042caee36" path="/var/lib/kubelet/pods/0229a79c-920d-482f-ab9f-92b042caee36/volumes" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.158932 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-utilities\") pod \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\" (UID: \"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5\") " Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.144878 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-kube-api-access-x7pvg" (OuterVolumeSpecName: "kube-api-access-x7pvg") pod "a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" (UID: "a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5"). InnerVolumeSpecName "kube-api-access-x7pvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.159166 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.159197 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-var-lock\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.159293 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.159340 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cc76d4c-23ec-4747-8441-727344a3ccff-kube-api-access\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.159352 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-var-lock\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.159450 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7pvg\" (UniqueName: \"kubernetes.io/projected/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-kube-api-access-x7pvg\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.160416 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-utilities" (OuterVolumeSpecName: "utilities") pod "a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" (UID: "a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.177021 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cc76d4c-23ec-4747-8441-727344a3ccff-kube-api-access\") pod \"installer-9-crc\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.179608 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" (UID: "a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.223072 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.261095 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.261363 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.445853 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 10:57:26 crc kubenswrapper[4869]: W0130 10:57:26.459487 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1cc76d4c_23ec_4747_8441_727344a3ccff.slice/crio-548ce00d38000e5522e9121092fea1c37cef8af4989a2da156a9908f0224146c WatchSource:0}: Error finding container 548ce00d38000e5522e9121092fea1c37cef8af4989a2da156a9908f0224146c: Status 404 returned error can't find the container with id 548ce00d38000e5522e9121092fea1c37cef8af4989a2da156a9908f0224146c Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.589537 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.665183 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-utilities\") pod \"71baa75f-e5ac-48ac-a224-e943ec26090c\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.665241 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7ssh\" (UniqueName: \"kubernetes.io/projected/71baa75f-e5ac-48ac-a224-e943ec26090c-kube-api-access-t7ssh\") pod \"71baa75f-e5ac-48ac-a224-e943ec26090c\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.665305 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-catalog-content\") pod \"71baa75f-e5ac-48ac-a224-e943ec26090c\" (UID: \"71baa75f-e5ac-48ac-a224-e943ec26090c\") " Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.666051 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-utilities" (OuterVolumeSpecName: "utilities") pod "71baa75f-e5ac-48ac-a224-e943ec26090c" (UID: "71baa75f-e5ac-48ac-a224-e943ec26090c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.669565 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71baa75f-e5ac-48ac-a224-e943ec26090c-kube-api-access-t7ssh" (OuterVolumeSpecName: "kube-api-access-t7ssh") pod "71baa75f-e5ac-48ac-a224-e943ec26090c" (UID: "71baa75f-e5ac-48ac-a224-e943ec26090c"). InnerVolumeSpecName "kube-api-access-t7ssh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.766369 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.766403 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7ssh\" (UniqueName: \"kubernetes.io/projected/71baa75f-e5ac-48ac-a224-e943ec26090c-kube-api-access-t7ssh\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.769549 4869 generic.go:334] "Generic (PLEG): container finished" podID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerID="7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1" exitCode=0 Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.769621 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v67cm" event={"ID":"71baa75f-e5ac-48ac-a224-e943ec26090c","Type":"ContainerDied","Data":"7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1"} Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.769639 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v67cm" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.769652 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v67cm" event={"ID":"71baa75f-e5ac-48ac-a224-e943ec26090c","Type":"ContainerDied","Data":"062f8792c8fc7027d054f6f006a1de81b9b955acb06b0d2d769c149dc348fa12"} Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.769675 4869 scope.go:117] "RemoveContainer" containerID="7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.773282 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1cc76d4c-23ec-4747-8441-727344a3ccff","Type":"ContainerStarted","Data":"548ce00d38000e5522e9121092fea1c37cef8af4989a2da156a9908f0224146c"} Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.775226 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g8t8" event={"ID":"a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5","Type":"ContainerDied","Data":"912bd8fd2986d2ed7eec1f2d63a4b3664ec375cb098b94d6742f4f37ca68f3ca"} Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.775300 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g8t8" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.787845 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71baa75f-e5ac-48ac-a224-e943ec26090c" (UID: "71baa75f-e5ac-48ac-a224-e943ec26090c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.795013 4869 scope.go:117] "RemoveContainer" containerID="c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.803188 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g8t8"] Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.806371 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g8t8"] Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.830265 4869 scope.go:117] "RemoveContainer" containerID="ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.850077 4869 scope.go:117] "RemoveContainer" containerID="7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1" Jan 30 10:57:26 crc kubenswrapper[4869]: E0130 10:57:26.850559 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1\": container with ID starting with 7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1 not found: ID does not exist" containerID="7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.850607 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1"} err="failed to get container status \"7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1\": rpc error: code = NotFound desc = could not find container \"7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1\": container with ID starting with 7dbaba4695b0915c7a7d39f43a45aaeafa5e9f14c439d417aac6571b186b97d1 not found: ID does not exist" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.850666 4869 scope.go:117] "RemoveContainer" containerID="c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38" Jan 30 10:57:26 crc kubenswrapper[4869]: E0130 10:57:26.850942 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38\": container with ID starting with c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38 not found: ID does not exist" containerID="c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.850963 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38"} err="failed to get container status \"c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38\": rpc error: code = NotFound desc = could not find container \"c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38\": container with ID starting with c6b405d4a4720d3e0ec20007cc17796e5eade6733a8f2c59fdebc1ff611add38 not found: ID does not exist" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.850977 4869 scope.go:117] "RemoveContainer" containerID="ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c" Jan 30 10:57:26 crc kubenswrapper[4869]: E0130 10:57:26.851209 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c\": container with ID starting with ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c not found: ID does not exist" containerID="ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.851240 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c"} err="failed to get container status \"ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c\": rpc error: code = NotFound desc = could not find container \"ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c\": container with ID starting with ccf9adc5f0bd2614f3e3d741e57dc9d9d3a74325b2dc7acb15107ccd8e686b6c not found: ID does not exist" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.851261 4869 scope.go:117] "RemoveContainer" containerID="a3d497ec914f7b2ecfbec197d2d1438d1095ffdb2995b7755dd8cd185138aae5" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.867480 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71baa75f-e5ac-48ac-a224-e943ec26090c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.868571 4869 scope.go:117] "RemoveContainer" containerID="1e043e47bd678f540dc9083695b9aecf10d9c21646a324f6c05d523977858810" Jan 30 10:57:26 crc kubenswrapper[4869]: I0130 10:57:26.885251 4869 scope.go:117] "RemoveContainer" containerID="c147e1e51f0cdaab4ef3e12e6527eb931e3731509b3c05660a78296758d73dfa" Jan 30 10:57:27 crc kubenswrapper[4869]: I0130 10:57:27.095136 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v67cm"] Jan 30 10:57:27 crc kubenswrapper[4869]: I0130 10:57:27.099997 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v67cm"] Jan 30 10:57:27 crc kubenswrapper[4869]: I0130 10:57:27.782998 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1cc76d4c-23ec-4747-8441-727344a3ccff","Type":"ContainerStarted","Data":"9f696f3ef130ec93578b505f273622e051d2417207ec42915c94b0c1012f05b1"} Jan 30 10:57:27 crc kubenswrapper[4869]: I0130 10:57:27.797115 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.7970955010000003 podStartE2EDuration="2.797095501s" podCreationTimestamp="2026-01-30 10:57:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:57:27.796350958 +0000 UTC m=+198.346227024" watchObservedRunningTime="2026-01-30 10:57:27.797095501 +0000 UTC m=+198.346971567" Jan 30 10:57:28 crc kubenswrapper[4869]: I0130 10:57:28.141003 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" path="/var/lib/kubelet/pods/71baa75f-e5ac-48ac-a224-e943ec26090c/volumes" Jan 30 10:57:28 crc kubenswrapper[4869]: I0130 10:57:28.141755 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" path="/var/lib/kubelet/pods/a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5/volumes" Jan 30 10:57:29 crc kubenswrapper[4869]: I0130 10:57:29.096040 4869 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:57:29 crc kubenswrapper[4869]: I0130 10:57:29.096103 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:57:29 crc kubenswrapper[4869]: I0130 10:57:29.135208 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:57:29 crc kubenswrapper[4869]: I0130 10:57:29.842760 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:57:33 crc kubenswrapper[4869]: I0130 10:57:33.712213 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gg5f8"] Jan 30 10:57:33 crc kubenswrapper[4869]: I0130 10:57:33.712895 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gg5f8" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="registry-server" containerID="cri-o://b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd" gracePeriod=2 Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.090503 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.161438 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-catalog-content\") pod \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.161536 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-utilities\") pod \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.161609 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zcwq\" (UniqueName: \"kubernetes.io/projected/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-kube-api-access-4zcwq\") pod \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\" (UID: \"46b8829e-b1f9-4b12-bcbf-da01b6ef5840\") " Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.162472 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-utilities" (OuterVolumeSpecName: "utilities") pod "46b8829e-b1f9-4b12-bcbf-da01b6ef5840" (UID: "46b8829e-b1f9-4b12-bcbf-da01b6ef5840"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.166885 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-kube-api-access-4zcwq" (OuterVolumeSpecName: "kube-api-access-4zcwq") pod "46b8829e-b1f9-4b12-bcbf-da01b6ef5840" (UID: "46b8829e-b1f9-4b12-bcbf-da01b6ef5840"). InnerVolumeSpecName "kube-api-access-4zcwq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.236314 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "46b8829e-b1f9-4b12-bcbf-da01b6ef5840" (UID: "46b8829e-b1f9-4b12-bcbf-da01b6ef5840"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.262841 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.262891 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zcwq\" (UniqueName: \"kubernetes.io/projected/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-kube-api-access-4zcwq\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.262903 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46b8829e-b1f9-4b12-bcbf-da01b6ef5840-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.818020 4869 generic.go:334] "Generic (PLEG): container finished" podID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerID="b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd" exitCode=0 Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.818065 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gg5f8" event={"ID":"46b8829e-b1f9-4b12-bcbf-da01b6ef5840","Type":"ContainerDied","Data":"b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd"} Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.818094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gg5f8" event={"ID":"46b8829e-b1f9-4b12-bcbf-da01b6ef5840","Type":"ContainerDied","Data":"224ee287aff00e076cab51cd4c324fc0e31a06a722cc37f5fcd5696d50831f88"} Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.818116 4869 scope.go:117] "RemoveContainer" containerID="b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.818147 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gg5f8" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.831818 4869 scope.go:117] "RemoveContainer" containerID="5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.848586 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gg5f8"] Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.848745 4869 scope.go:117] "RemoveContainer" containerID="c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.852526 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gg5f8"] Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.871881 4869 scope.go:117] "RemoveContainer" containerID="b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd" Jan 30 10:57:34 crc kubenswrapper[4869]: E0130 10:57:34.872287 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd\": container with ID starting with b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd not found: ID does not exist" containerID="b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.872322 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd"} err="failed to get container status \"b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd\": rpc error: code = NotFound desc = could not find container \"b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd\": container with ID starting with b2b90f4e8a0f8bff0530f545546238f7408bf0ef907ac4497e2f979cce7571bd not found: ID does not exist" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.872341 4869 scope.go:117] "RemoveContainer" containerID="5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8" Jan 30 10:57:34 crc kubenswrapper[4869]: E0130 10:57:34.872735 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8\": container with ID starting with 5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8 not found: ID does not exist" containerID="5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.872779 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8"} err="failed to get container status \"5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8\": rpc error: code = NotFound desc = could not find container \"5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8\": container with ID starting with 5a1c787c12c4501ae88515c187975b5f71327073d734705bcf576de68b55fbe8 not found: ID does not exist" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.872806 4869 scope.go:117] "RemoveContainer" containerID="c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a" Jan 30 10:57:34 crc kubenswrapper[4869]: E0130 10:57:34.873326 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a\": container with ID starting with c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a not found: ID does not exist" containerID="c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a" Jan 30 10:57:34 crc kubenswrapper[4869]: I0130 10:57:34.873349 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a"} err="failed to get container status \"c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a\": rpc error: code = NotFound desc = could not find container \"c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a\": container with ID starting with c63703a3c1013118e48a3c457a39cf5f9d690d9cf33addea470005d0aa04b82a not found: ID does not exist" Jan 30 10:57:36 crc kubenswrapper[4869]: I0130 10:57:36.140586 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" path="/var/lib/kubelet/pods/46b8829e-b1f9-4b12-bcbf-da01b6ef5840/volumes" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.098546 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" podUID="a945a8a2-155f-4e1d-a636-a04711e6e40c" containerName="oauth-openshift" containerID="cri-o://86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641" gracePeriod=15 Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.486151 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602461 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-serving-cert\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602532 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-error\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602564 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-policies\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602602 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-service-ca\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602630 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-ocp-branding-template\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602672 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-session\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602699 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-dir\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602910 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.602883 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-trusted-ca-bundle\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603607 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-provider-selection\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603203 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-login\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603750 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-router-certs\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603793 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-cliconfig\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603315 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603820 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjknl\" (UniqueName: \"kubernetes.io/projected/a945a8a2-155f-4e1d-a636-a04711e6e40c-kube-api-access-wjknl\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603399 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.603856 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-idp-0-file-data\") pod \"a945a8a2-155f-4e1d-a636-a04711e6e40c\" (UID: \"a945a8a2-155f-4e1d-a636-a04711e6e40c\") " Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.604113 4869 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.604129 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.604141 4869 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a945a8a2-155f-4e1d-a636-a04711e6e40c-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.604154 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.607785 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.608960 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.609242 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.609920 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.610218 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a945a8a2-155f-4e1d-a636-a04711e6e40c-kube-api-access-wjknl" (OuterVolumeSpecName: "kube-api-access-wjknl") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "kube-api-access-wjknl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.610412 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.610851 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.616518 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.616686 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.619302 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "a945a8a2-155f-4e1d-a636-a04711e6e40c" (UID: "a945a8a2-155f-4e1d-a636-a04711e6e40c"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705405 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjknl\" (UniqueName: \"kubernetes.io/projected/a945a8a2-155f-4e1d-a636-a04711e6e40c-kube-api-access-wjknl\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705440 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705451 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705460 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705470 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705479 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705489 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705500 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705508 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.705516 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a945a8a2-155f-4e1d-a636-a04711e6e40c-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.874580 4869 generic.go:334] "Generic (PLEG): container finished" podID="a945a8a2-155f-4e1d-a636-a04711e6e40c" containerID="86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641" exitCode=0 Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.874631 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" 
event={"ID":"a945a8a2-155f-4e1d-a636-a04711e6e40c","Type":"ContainerDied","Data":"86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641"} Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.874663 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" event={"ID":"a945a8a2-155f-4e1d-a636-a04711e6e40c","Type":"ContainerDied","Data":"4dd9d0090a93f6ab2cae1daebdfe5e21502ae79974f478fc9bad2508ce2fa5a2"} Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.874672 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dqfj8" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.874681 4869 scope.go:117] "RemoveContainer" containerID="86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.892837 4869 scope.go:117] "RemoveContainer" containerID="86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641" Jan 30 10:57:45 crc kubenswrapper[4869]: E0130 10:57:45.893156 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641\": container with ID starting with 86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641 not found: ID does not exist" containerID="86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.893188 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641"} err="failed to get container status \"86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641\": rpc error: code = NotFound desc = could not find container \"86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641\": container with ID starting with 86f6caadf3f31c0fc78b1dff7f3803ed8af965e2d928ca0afda38be1d7b84641 not found: ID does not exist" Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.901950 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dqfj8"] Jan 30 10:57:45 crc kubenswrapper[4869]: I0130 10:57:45.905075 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dqfj8"] Jan 30 10:57:46 crc kubenswrapper[4869]: I0130 10:57:46.140919 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a945a8a2-155f-4e1d-a636-a04711e6e40c" path="/var/lib/kubelet/pods/a945a8a2-155f-4e1d-a636-a04711e6e40c/volumes" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.766579 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-57569d6b9d-jj59c"] Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.766900 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="registry-server" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.766920 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="registry-server" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.766949 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="extract-utilities" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 
10:57:47.766961 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="extract-utilities" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.766982 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="extract-utilities" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.766994 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="extract-utilities" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.767015 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="registry-server" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767027 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="registry-server" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.767047 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="registry-server" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767059 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="registry-server" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.767081 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="extract-content" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767093 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="extract-content" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.767112 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="extract-utilities" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767124 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="extract-utilities" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.767141 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a945a8a2-155f-4e1d-a636-a04711e6e40c" containerName="oauth-openshift" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767154 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a945a8a2-155f-4e1d-a636-a04711e6e40c" containerName="oauth-openshift" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.767169 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="extract-content" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767181 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="extract-content" Jan 30 10:57:47 crc kubenswrapper[4869]: E0130 10:57:47.767194 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="extract-content" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767206 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="extract-content" Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767369 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a618aacd-0b8e-4e5e-ba1e-f6e9871bdce5" containerName="registry-server" Jan 30 10:57:47 crc 
kubenswrapper[4869]: I0130 10:57:47.767390 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b8829e-b1f9-4b12-bcbf-da01b6ef5840" containerName="registry-server"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767413 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a945a8a2-155f-4e1d-a636-a04711e6e40c" containerName="oauth-openshift"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.767428 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="71baa75f-e5ac-48ac-a224-e943ec26090c" containerName="registry-server"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.768007 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.771802 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.772355 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.772617 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.774499 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.775067 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.775138 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.776145 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.776466 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.776591 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.776852 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.778188 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.780028 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.782317 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-57569d6b9d-jj59c"]
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.784042 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.785854 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.790626 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.832865 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b448031c-063d-4407-aa70-05a8fdadaed9-audit-dir\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.832908 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-session\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.832933 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.832957 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.832977 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-router-certs\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833037 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-login\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833060 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj7pb\" (UniqueName: \"kubernetes.io/projected/b448031c-063d-4407-aa70-05a8fdadaed9-kube-api-access-wj7pb\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833083 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833099 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-service-ca\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833217 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833262 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833299 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-audit-policies\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833324 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-error\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.833351 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934329 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-session\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") "
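The reconciler walks every volume of the new oauth-openshift pod through the same three phases that repeat above and below: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded. A volume that stalls between phases is the usual mount-failure signature, so a quick audit of a log like this is to record the last phase seen per volume. A rough stdlib-Go sketch (assumed helper tooling, not from this log; the optional backslash in the regex absorbs the \" escaping in the captured lines):

```go
// Illustrative sketch: track the most recent mount phase reported for each
// volume in lines like the ones above, to spot volumes stuck before SetUp.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

var volRe = regexp.MustCompile(`for volume \\?"([^"\\]+)\\?"`)

func phase(line string) string {
	switch {
	case strings.Contains(line, "VerifyControllerAttachedVolume started"):
		return "attached-verify"
	case strings.Contains(line, "MountVolume started"):
		return "mount-started"
	case strings.Contains(line, "MountVolume.SetUp succeeded"):
		return "setup-succeeded"
	}
	return ""
}

func main() {
	last := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	for sc.Scan() {
		if p := phase(sc.Text()); p != "" {
			if m := volRe.FindStringSubmatch(sc.Text()); m != nil {
				last[m[1]] = p
			}
		}
	}
	for vol, p := range last {
		fmt.Printf("%-50s %s\n", vol, p)
	}
}
```

On this section every oauth-openshift volume ends in setup-succeeded, which is consistent with the pod starting a few hundred milliseconds later.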
pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934413 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b448031c-063d-4407-aa70-05a8fdadaed9-audit-dir\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934452 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934493 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934530 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-router-certs\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934576 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-login\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934622 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj7pb\" (UniqueName: \"kubernetes.io/projected/b448031c-063d-4407-aa70-05a8fdadaed9-kube-api-access-wj7pb\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934666 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934702 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-service-ca\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934773 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934806 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934835 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-audit-policies\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934864 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-error\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.934900 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.936529 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-audit-policies\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.936959 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b448031c-063d-4407-aa70-05a8fdadaed9-audit-dir\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.937195 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-service-ca\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.938232 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.940250 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.939809 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.940497 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-login\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.940518 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-router-certs\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.940781 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.941102 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-session\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.941224 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.942558 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-user-template-error\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.943275 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b448031c-063d-4407-aa70-05a8fdadaed9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:47 crc kubenswrapper[4869]: I0130 10:57:47.955404 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj7pb\" (UniqueName: \"kubernetes.io/projected/b448031c-063d-4407-aa70-05a8fdadaed9-kube-api-access-wj7pb\") pod \"oauth-openshift-57569d6b9d-jj59c\" (UID: \"b448031c-063d-4407-aa70-05a8fdadaed9\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:48 crc kubenswrapper[4869]: I0130 10:57:48.093552 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:48 crc kubenswrapper[4869]: I0130 10:57:48.308638 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-57569d6b9d-jj59c"]
Jan 30 10:57:48 crc kubenswrapper[4869]: I0130 10:57:48.890529 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c" event={"ID":"b448031c-063d-4407-aa70-05a8fdadaed9","Type":"ContainerStarted","Data":"c9ed0d4caac94f3c1e865134aa4463b4aa7b07e5a4f345b971c729aac0f9f8e1"}
Jan 30 10:57:48 crc kubenswrapper[4869]: I0130 10:57:48.890931 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:48 crc kubenswrapper[4869]: I0130 10:57:48.890953 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c" event={"ID":"b448031c-063d-4407-aa70-05a8fdadaed9","Type":"ContainerStarted","Data":"0d21f754d3826070a054ecccfee3263d3c524c7b1e9944d5116fc353da918e50"}
Jan 30 10:57:48 crc kubenswrapper[4869]: I0130 10:57:48.896381 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c"
Jan 30 10:57:48 crc kubenswrapper[4869]: I0130 10:57:48.911290 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-57569d6b9d-jj59c" podStartSLOduration=28.911272404 podStartE2EDuration="28.911272404s" podCreationTimestamp="2026-01-30 10:57:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:57:48.909426478 +0000 UTC m=+219.459302544" watchObservedRunningTime="2026-01-30 10:57:48.911272404 +0000 UTC m=+219.461148470"
Jan 30 10:57:51 crc kubenswrapper[4869]: I0130 10:57:51.768925 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp
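The pod_startup_latency_tracker record above carries its own arithmetic: podStartE2EDuration is simply the gap between podCreationTimestamp and the watch-observed running time (the pull timestamps are the zero value because no image pull happened for this pod). A small Go sketch checking the numbers, with the monotonic `m=+…` suffixes stripped from the timestamps as copied from the line:

```go
// Sketch reproducing the podStartE2EDuration arithmetic from the latency
// record above: watchObservedRunningTime - podCreationTimestamp.
package main

import (
	"fmt"
	"time"
)

// Layout matching Go's default time.Time string form used in the log.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	created, _ := time.Parse(layout, "2026-01-30 10:57:20 +0000 UTC")
	running, _ := time.Parse(layout, "2026-01-30 10:57:48.911272404 +0000 UTC")
	fmt.Println(running.Sub(created)) // 28.911272404s, matching podStartE2EDuration
}
```

The 28.9s is dominated by the gap between pod creation at 10:57:20 and the sandbox start at 10:57:47, not by the container starts themselves, which complete within about a second.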
127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 10:57:51 crc kubenswrapper[4869]: I0130 10:57:51.770783 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 10:57:51 crc kubenswrapper[4869]: I0130 10:57:51.771096 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2"
Jan 30 10:57:51 crc kubenswrapper[4869]: I0130 10:57:51.772212 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 10:57:51 crc kubenswrapper[4869]: I0130 10:57:51.772519 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4" gracePeriod=600
Jan 30 10:57:51 crc kubenswrapper[4869]: E0130 10:57:51.871532 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef13186b_7f82_4025_97e3_d899be8c207f.slice/crio-conmon-ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4.scope\": RecentStats: unable to find data in memory cache]"
Jan 30 10:57:51 crc kubenswrapper[4869]: I0130 10:57:51.909478 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4" exitCode=0
Jan 30 10:57:51 crc kubenswrapper[4869]: I0130 10:57:51.909532 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4"}
Jan 30 10:57:52 crc kubenswrapper[4869]: I0130 10:57:52.918126 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"ff0bbf8a76427dc77b8dc6f1bcafe269408c942464d9529fc1f94cde0f90a036"}
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.677454 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m7nxw"]
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.678944 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m7nxw" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="registry-server" containerID="cri-o://e14637d0ad031188eb163b753afb5ef579d9edccc84ee0bef98e945349d5608d" gracePeriod=30
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.690570 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gcxdk"]
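The probe failure above is a plain HTTP GET that could not even open a TCP connection. A hedged stdlib-Go approximation of what an HTTP liveness check amounts to (port and path taken from the log line; the kubelet treats 2xx/3xx status codes as success, and the 1-second timeout is the probe default rather than something shown in this log):

```go
// Illustrative probe check, not kubelet source: GET the health endpoint and
// treat transport errors (e.g. connection refused) or bad status as failure.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second} // default probe timeoutSeconds
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}
```

Once enough consecutive probes fail, the kubelet does exactly what the next records show: it kills the container with its termination grace period and lets the restart policy bring it back, which is why a ContainerDied is followed a second later by a ContainerStarted for the same pod.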
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.694837 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gcxdk" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="registry-server" containerID="cri-o://2fe077b01ea0475b048c92fcbda9b7f4612e727195f1f158659f467d3e7581f9" gracePeriod=30
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.704561 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rkwmf"]
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.704813 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" containerName="marketplace-operator" containerID="cri-o://4940af762021649fe2a2f10dab03be9c7650594d14e6eb542d31f2dfe45d3964" gracePeriod=30
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.719760 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht8ck"]
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.720039 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ht8ck" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="registry-server" containerID="cri-o://28108176efa84fbe3b6f2c8d41b3d9bba63e02a9a1602e373c260a4134f00b22" gracePeriod=30
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.727059 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d68sd"]
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.727908 4869 util.go:30] "No sandbox for pod can be found.
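Every "Killing container with a grace period" record above follows the same contract: ask the runtime to stop the container, and force-kill only if it is still running when the grace period (30s for these marketplace pods, 600s for machine-config-daemon earlier) runs out. A conceptual process-level sketch of that escalation, not the CRI path the kubelet actually takes:

```go
// Conceptual sketch of grace-period termination: SIGTERM first, SIGKILL only
// if the process outlives the grace period. Process handling is illustrative.
package main

import (
	"os/exec"
	"syscall"
	"time"
)

func killWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite stop request
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited in time; compare exitCode=0 in the PLEG records below
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL once the grace period expires
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "5")
	_ = cmd.Start()
	_ = killWithGrace(cmd, 30*time.Second)
}
```

All five containers here exit with exitCode=0 well inside their grace periods, so the force-kill branch never fires in this section.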
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.734748 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dhxch"]
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.735117 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dhxch" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="registry-server" containerID="cri-o://31176bb44bb5d56bedb53229458b2ecc95c29c22b00f6b78a0b960d5dcc41f79" gracePeriod=30
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.737377 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d68sd"]
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.840042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.840501 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.840572 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkssl\" (UniqueName: \"kubernetes.io/projected/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-kube-api-access-gkssl\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.941852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.941910 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.941930 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkssl\" (UniqueName: \"kubernetes.io/projected/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-kube-api-access-gkssl\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.944181 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.948745 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.958776 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkssl\" (UniqueName: \"kubernetes.io/projected/98cb9d90-57ea-4bf2-8ee4-dbcf18e79293-kube-api-access-gkssl\") pod \"marketplace-operator-79b997595-d68sd\" (UID: \"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293\") " pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.976581 4869 generic.go:334] "Generic (PLEG): container finished" podID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerID="31176bb44bb5d56bedb53229458b2ecc95c29c22b00f6b78a0b960d5dcc41f79" exitCode=0
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.976660 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhxch" event={"ID":"112aad1f-e2f9-41e8-a9c8-1d3b3297528e","Type":"ContainerDied","Data":"31176bb44bb5d56bedb53229458b2ecc95c29c22b00f6b78a0b960d5dcc41f79"}
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.978433 4869 generic.go:334] "Generic (PLEG): container finished" podID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerID="28108176efa84fbe3b6f2c8d41b3d9bba63e02a9a1602e373c260a4134f00b22" exitCode=0
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.978477 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht8ck" event={"ID":"2db1788d-f11c-49f6-b613-d3ed750c8d8a","Type":"ContainerDied","Data":"28108176efa84fbe3b6f2c8d41b3d9bba63e02a9a1602e373c260a4134f00b22"}
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.980529 4869 generic.go:334] "Generic (PLEG): container finished" podID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerID="2fe077b01ea0475b048c92fcbda9b7f4612e727195f1f158659f467d3e7581f9" exitCode=0
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.980583 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gcxdk" event={"ID":"18c7bbb6-bdf6-4e26-9670-49a30b7dab22","Type":"ContainerDied","Data":"2fe077b01ea0475b048c92fcbda9b7f4612e727195f1f158659f467d3e7581f9"}
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.982655 4869 generic.go:334] "Generic (PLEG): container finished" podID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" containerID="4940af762021649fe2a2f10dab03be9c7650594d14e6eb542d31f2dfe45d3964" exitCode=0
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.982732 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" event={"ID":"44c5913f-a9a0-4b9f-aa60-c6158d19a38a","Type":"ContainerDied","Data":"4940af762021649fe2a2f10dab03be9c7650594d14e6eb542d31f2dfe45d3964"}
Jan 30 10:58:03 crc
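The generic.go:334 "container finished" records above, and the ContainerDied events they feed into the sync loop, come from the kubelet's PLEG (pod lifecycle event generator), which periodically relists container states from the runtime and emits an event for every observed transition. A toy sketch of that diffing step (types and names illustrative, not the kubelet's):

```go
// Rough sketch of a PLEG-style relist: compare the previous container-state
// snapshot with the current one and emit an event per observed transition.
package main

import "fmt"

type State string

const (
	Running State = "running"
	Exited  State = "exited"
)

func relist(old, cur map[string]State) []string {
	var events []string
	for id, s := range cur {
		prev, seen := old[id]
		switch {
		case !seen && s == Running:
			events = append(events, "ContainerStarted "+id)
		case seen && prev == Running && s == Exited:
			events = append(events, "ContainerDied "+id)
		}
	}
	return events
}

func main() {
	old := map[string]State{"31176bb4": Running}
	cur := map[string]State{"31176bb4": Exited}
	fmt.Println(relist(old, cur)) // [ContainerDied 31176bb4]
}
```

That relist model explains the ordering in this section: the kill requests land first, and the ContainerDied events surface a beat later, once the next relist observes the exited containers.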
kubenswrapper[4869]: I0130 10:58:03.985224 4869 generic.go:334] "Generic (PLEG): container finished" podID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerID="e14637d0ad031188eb163b753afb5ef579d9edccc84ee0bef98e945349d5608d" exitCode=0
Jan 30 10:58:03 crc kubenswrapper[4869]: I0130 10:58:03.985250 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7nxw" event={"ID":"d28fe085-7a0e-4de6-8579-88e9583b87a3","Type":"ContainerDied","Data":"e14637d0ad031188eb163b753afb5ef579d9edccc84ee0bef98e945349d5608d"}
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.025532 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.092206 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m7nxw"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.199290 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gcxdk"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.201607 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhxch"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.207817 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.248111 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmh7t\" (UniqueName: \"kubernetes.io/projected/d28fe085-7a0e-4de6-8579-88e9583b87a3-kube-api-access-nmh7t\") pod \"d28fe085-7a0e-4de6-8579-88e9583b87a3\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.248157 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-catalog-content\") pod \"d28fe085-7a0e-4de6-8579-88e9583b87a3\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.248266 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-utilities\") pod \"d28fe085-7a0e-4de6-8579-88e9583b87a3\" (UID: \"d28fe085-7a0e-4de6-8579-88e9583b87a3\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.249832 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-utilities" (OuterVolumeSpecName: "utilities") pod "d28fe085-7a0e-4de6-8579-88e9583b87a3" (UID: "d28fe085-7a0e-4de6-8579-88e9583b87a3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.250269 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht8ck"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.252175 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d28fe085-7a0e-4de6-8579-88e9583b87a3-kube-api-access-nmh7t" (OuterVolumeSpecName: "kube-api-access-nmh7t") pod "d28fe085-7a0e-4de6-8579-88e9583b87a3" (UID: "d28fe085-7a0e-4de6-8579-88e9583b87a3"). InnerVolumeSpecName "kube-api-access-nmh7t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.302370 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d68sd"]
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.315961 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d28fe085-7a0e-4de6-8579-88e9583b87a3" (UID: "d28fe085-7a0e-4de6-8579-88e9583b87a3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349350 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-operator-metrics\") pod \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349412 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-utilities\") pod \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349443 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jzsl\" (UniqueName: \"kubernetes.io/projected/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-kube-api-access-7jzsl\") pod \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349531 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-utilities\") pod \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349556 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5l6c\" (UniqueName: \"kubernetes.io/projected/2db1788d-f11c-49f6-b613-d3ed750c8d8a-kube-api-access-l5l6c\") pod \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349584 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kch96\" (UniqueName: \"kubernetes.io/projected/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-kube-api-access-kch96\") pod \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349609 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-catalog-content\") pod \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\" (UID: \"112aad1f-e2f9-41e8-a9c8-1d3b3297528e\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349636 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-trusted-ca\") pod \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349659 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cxhv\" (UniqueName: \"kubernetes.io/projected/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-kube-api-access-5cxhv\") pod \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\" (UID: \"44c5913f-a9a0-4b9f-aa60-c6158d19a38a\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349687 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-catalog-content\") pod \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349728 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-utilities\") pod \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\" (UID: \"18c7bbb6-bdf6-4e26-9670-49a30b7dab22\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.349774 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-catalog-content\") pod \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\" (UID: \"2db1788d-f11c-49f6-b613-d3ed750c8d8a\") "
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.350046 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.350065 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmh7t\" (UniqueName: \"kubernetes.io/projected/d28fe085-7a0e-4de6-8579-88e9583b87a3-kube-api-access-nmh7t\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.350079 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28fe085-7a0e-4de6-8579-88e9583b87a3-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.350361 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-utilities" (OuterVolumeSpecName: "utilities") pod "112aad1f-e2f9-41e8-a9c8-1d3b3297528e" (UID: "112aad1f-e2f9-41e8-a9c8-1d3b3297528e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.351022 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-utilities" (OuterVolumeSpecName: "utilities") pod "2db1788d-f11c-49f6-b613-d3ed750c8d8a" (UID: "2db1788d-f11c-49f6-b613-d3ed750c8d8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.351399 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-utilities" (OuterVolumeSpecName: "utilities") pod "18c7bbb6-bdf6-4e26-9670-49a30b7dab22" (UID: "18c7bbb6-bdf6-4e26-9670-49a30b7dab22"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.352490 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "44c5913f-a9a0-4b9f-aa60-c6158d19a38a" (UID: "44c5913f-a9a0-4b9f-aa60-c6158d19a38a"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.352867 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "44c5913f-a9a0-4b9f-aa60-c6158d19a38a" (UID: "44c5913f-a9a0-4b9f-aa60-c6158d19a38a"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.352906 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-kube-api-access-5cxhv" (OuterVolumeSpecName: "kube-api-access-5cxhv") pod "44c5913f-a9a0-4b9f-aa60-c6158d19a38a" (UID: "44c5913f-a9a0-4b9f-aa60-c6158d19a38a"). InnerVolumeSpecName "kube-api-access-5cxhv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.353247 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-kube-api-access-7jzsl" (OuterVolumeSpecName: "kube-api-access-7jzsl") pod "112aad1f-e2f9-41e8-a9c8-1d3b3297528e" (UID: "112aad1f-e2f9-41e8-a9c8-1d3b3297528e"). InnerVolumeSpecName "kube-api-access-7jzsl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.354726 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-kube-api-access-kch96" (OuterVolumeSpecName: "kube-api-access-kch96") pod "18c7bbb6-bdf6-4e26-9670-49a30b7dab22" (UID: "18c7bbb6-bdf6-4e26-9670-49a30b7dab22"). InnerVolumeSpecName "kube-api-access-kch96". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.357365 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db1788d-f11c-49f6-b613-d3ed750c8d8a-kube-api-access-l5l6c" (OuterVolumeSpecName: "kube-api-access-l5l6c") pod "2db1788d-f11c-49f6-b613-d3ed750c8d8a" (UID: "2db1788d-f11c-49f6-b613-d3ed750c8d8a"). InnerVolumeSpecName "kube-api-access-l5l6c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.393633 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2db1788d-f11c-49f6-b613-d3ed750c8d8a" (UID: "2db1788d-f11c-49f6-b613-d3ed750c8d8a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.416646 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "18c7bbb6-bdf6-4e26-9670-49a30b7dab22" (UID: "18c7bbb6-bdf6-4e26-9670-49a30b7dab22"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.450868 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.450902 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5l6c\" (UniqueName: \"kubernetes.io/projected/2db1788d-f11c-49f6-b613-d3ed750c8d8a-kube-api-access-l5l6c\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.450954 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kch96\" (UniqueName: \"kubernetes.io/projected/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-kube-api-access-kch96\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.450966 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.450977 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cxhv\" (UniqueName: \"kubernetes.io/projected/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-kube-api-access-5cxhv\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.450989 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.451000 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c7bbb6-bdf6-4e26-9670-49a30b7dab22-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.451011 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db1788d-f11c-49f6-b613-d3ed750c8d8a-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.451024 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/44c5913f-a9a0-4b9f-aa60-c6158d19a38a-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.451036 4869 reconciler_common.go:293] "Volume detached for volume
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.451047 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jzsl\" (UniqueName: \"kubernetes.io/projected/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-kube-api-access-7jzsl\") on node \"crc\" DevicePath \"\""
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.499744 4869 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500021 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500045 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500058 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500067 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500081 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500091 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500098 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500106 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500116 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500124 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500141 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500149 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500160 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500168 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500178 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500185 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500199 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" containerName="marketplace-operator"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500207 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" containerName="marketplace-operator"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500219 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500226 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500237 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500245 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="extract-utilities"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500255 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500262 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.500274 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500282 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="extract-content"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500427 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500447 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500459 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500468 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" containerName="marketplace-operator"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500480 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" containerName="registry-server"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.500872 4869 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.501164 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d" gracePeriod=15
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.501329 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.501365 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e" gracePeriod=15
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.501377 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7" gracePeriod=15
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.501447 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75" gracePeriod=15
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502003 4869 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.501587 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee" gracePeriod=15
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502148 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502167 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502177 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502187 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502201 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502209 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502218 4869
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502226 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502236 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502245 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502264 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502273 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502284 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502291 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502384 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502395 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502404 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502413 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502424 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502432 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502440 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.502544 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.502553 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.518573 4869 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "112aad1f-e2f9-41e8-a9c8-1d3b3297528e" (UID: "112aad1f-e2f9-41e8-a9c8-1d3b3297528e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.539079 4869 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.246:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.552240 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112aad1f-e2f9-41e8-a9c8-1d3b3297528e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.653881 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.653928 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.653957 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.653976 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.654068 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.654151 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.654195 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.654243 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755617 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755686 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755756 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755779 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755837 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755862 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755903 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.755993 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.756040 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.756069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.756103 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.756130 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.756156 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.756185 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.756243 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.840422 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:04 crc kubenswrapper[4869]: W0130 10:58:04.860548 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-0fbe3b6dcf2f2687f9ab114864e44e3cc9d764458a48f63c0c57305199e6f08c WatchSource:0}: Error finding container 0fbe3b6dcf2f2687f9ab114864e44e3cc9d764458a48f63c0c57305199e6f08c: Status 404 returned error can't find the container with id 0fbe3b6dcf2f2687f9ab114864e44e3cc9d764458a48f63c0c57305199e6f08c Jan 30 10:58:04 crc kubenswrapper[4869]: E0130 10:58:04.862963 4869 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.246:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f7d0fad20b5b8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 10:58:04.862477752 +0000 UTC m=+235.412353818,LastTimestamp:2026-01-30 10:58:04.862477752 +0000 UTC m=+235.412353818,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.907525 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.907583 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.993564 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/0.log" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.993613 4869 generic.go:334] "Generic (PLEG): container finished" podID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" containerID="d889f44b80a0869de9abb58f7b00dd9b30be77df8724a4877e34f89dfb17f2ba" exitCode=1 Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.993682 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" event={"ID":"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293","Type":"ContainerDied","Data":"d889f44b80a0869de9abb58f7b00dd9b30be77df8724a4877e34f89dfb17f2ba"} Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.993729 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" event={"ID":"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293","Type":"ContainerStarted","Data":"6c69f05afb8c16739aaa0f9c6fdcd2ea0254551f12e293b4e0a240ec1b24f49e"} Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.994121 4869 scope.go:117] "RemoveContainer" containerID="d889f44b80a0869de9abb58f7b00dd9b30be77df8724a4877e34f89dfb17f2ba" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.994438 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.994979 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.997094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhxch" event={"ID":"112aad1f-e2f9-41e8-a9c8-1d3b3297528e","Type":"ContainerDied","Data":"ba5a68e1b760c91efdf68d52c685417ab35111c5aff5860f36a75614461f0325"} Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.997136 4869 scope.go:117] "RemoveContainer" containerID="31176bb44bb5d56bedb53229458b2ecc95c29c22b00f6b78a0b960d5dcc41f79" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.997196 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhxch" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.998159 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.998494 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:04 crc kubenswrapper[4869]: I0130 10:58:04.998895 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.000281 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht8ck" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.000272 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht8ck" event={"ID":"2db1788d-f11c-49f6-b613-d3ed750c8d8a","Type":"ContainerDied","Data":"aed7914cbd64b379d96dd6f6b6396d1bd9b26794ddfdb939a94d36ef14c2f1f2"} Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.000878 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.001143 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.001374 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.001639 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.002161 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.002163 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" event={"ID":"44c5913f-a9a0-4b9f-aa60-c6158d19a38a","Type":"ContainerDied","Data":"23039b801e47c9fdf53e8d154aa8558897595887edf58d3d344f883707b60a00"} Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.003020 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.003273 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.003537 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.003791 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.004115 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.007208 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0fbe3b6dcf2f2687f9ab114864e44e3cc9d764458a48f63c0c57305199e6f08c"} Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.012033 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.014277 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.015541 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7" exitCode=0 Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 
10:58:05.015566 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75" exitCode=0 Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.015575 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e" exitCode=0 Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.015583 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee" exitCode=2 Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.019879 4869 generic.go:334] "Generic (PLEG): container finished" podID="1cc76d4c-23ec-4747-8441-727344a3ccff" containerID="9f696f3ef130ec93578b505f273622e051d2417207ec42915c94b0c1012f05b1" exitCode=0 Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.019943 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1cc76d4c-23ec-4747-8441-727344a3ccff","Type":"ContainerDied","Data":"9f696f3ef130ec93578b505f273622e051d2417207ec42915c94b0c1012f05b1"} Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.021277 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.021611 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.022348 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.023106 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.023835 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.024837 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.026041 4869 scope.go:117] "RemoveContainer" containerID="ef927e52f370e13eac259fe167a45848c8199acc84fc1738f61757ab0452ff90" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.028868 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gcxdk" event={"ID":"18c7bbb6-bdf6-4e26-9670-49a30b7dab22","Type":"ContainerDied","Data":"feaf8db97665ad2d45ddf0b3158cedacbd035581c14e79ebe8ba3dcd181914db"} Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.028876 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gcxdk" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.029855 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.030153 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.030478 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.030803 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.031092 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.031613 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.037996 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.039909 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7nxw" event={"ID":"d28fe085-7a0e-4de6-8579-88e9583b87a3","Type":"ContainerDied","Data":"e995cf49722965579c19fef06427505d500e5691e2d71c8ab6272a645509515e"} Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.040015 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m7nxw" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.041032 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.041234 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.041607 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.042028 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.042231 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.042431 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.042622 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.042844 4869 status_manager.go:851] 
"Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.054311 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.054694 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.054959 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.055243 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.056075 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.056783 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.057005 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.057294 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc 
kubenswrapper[4869]: I0130 10:58:05.059122 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.060093 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.060336 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.060556 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.060745 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.060892 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.061035 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.061177 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.070357 4869 scope.go:117] "RemoveContainer" containerID="c929e29bb2baec575e0d7b0ca2e44c8a5bc506141cc185b6d817b8f6fd97b1d8" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.089281 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" 
pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.089555 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.089861 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.090172 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.090430 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.090673 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.091118 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.091369 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.092051 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.092867 4869 status_manager.go:851] "Failed to get status for pod" 
podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.093653 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.095015 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.095628 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.096163 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.096782 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.098203 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.103694 4869 scope.go:117] "RemoveContainer" containerID="28108176efa84fbe3b6f2c8d41b3d9bba63e02a9a1602e373c260a4134f00b22" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.120416 4869 scope.go:117] "RemoveContainer" containerID="29777e8206342d3f8c8cbe8d0c0281461676020493eac04312adf9869b7a2f02" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.136229 4869 scope.go:117] "RemoveContainer" containerID="93cbae64c4b7aeb176d1c21ab71f0706da038fea57405f151ffb4c7379675e81" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.154180 4869 scope.go:117] "RemoveContainer" containerID="4940af762021649fe2a2f10dab03be9c7650594d14e6eb542d31f2dfe45d3964" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.168155 4869 scope.go:117] "RemoveContainer" 
containerID="888358b2d3b1374ade8f7cd6879cb57c24c6b201acf14c2f187120d209dcbaa5" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.203532 4869 scope.go:117] "RemoveContainer" containerID="2fe077b01ea0475b048c92fcbda9b7f4612e727195f1f158659f467d3e7581f9" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.217468 4869 scope.go:117] "RemoveContainer" containerID="a46762d485a43d8e6e1d3a3e3b044c6ec091159b57794397e9cf4b732608d007" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.224071 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.224252 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.237111 4869 scope.go:117] "RemoveContainer" containerID="296db95de69b512c57293facb3afb31c9828e48b2c80c1d69d0a717381ff9469" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.255518 4869 scope.go:117] "RemoveContainer" containerID="e14637d0ad031188eb163b753afb5ef579d9edccc84ee0bef98e945349d5608d" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.268791 4869 scope.go:117] "RemoveContainer" containerID="3d4666107dbbf62a5148e0fa95079ed0e256fbd42d4e61dc7cf3fd3413b8a50d" Jan 30 10:58:05 crc kubenswrapper[4869]: I0130 10:58:05.288159 4869 scope.go:117] "RemoveContainer" containerID="469c241b1c8ffafef148bc223299795896cbaaaa31470e02b39c5b54ecb94a44" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.051520 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"81e79c074563928047167b1b45e770719f4e000bb1dbb002f0fae68dc2d5a851"} Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.052195 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: E0130 10:58:06.052270 4869 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.246:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.053294 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.053381 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/1.log" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.053750 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.053785 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/0.log" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.053817 4869 generic.go:334] "Generic (PLEG): container finished" podID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" containerID="c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95" exitCode=1 Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.053941 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" event={"ID":"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293","Type":"ContainerDied","Data":"c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95"} Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.053999 4869 scope.go:117] "RemoveContainer" containerID="d889f44b80a0869de9abb58f7b00dd9b30be77df8724a4877e34f89dfb17f2ba" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.054002 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.054286 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.054520 4869 scope.go:117] "RemoveContainer" containerID="c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.054548 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.054812 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: E0130 10:58:06.054846 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s 
restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-d68sd_openshift-marketplace(98cb9d90-57ea-4bf2-8ee4-dbcf18e79293)\"" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.055018 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.055477 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.056057 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.056404 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.056442 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.056623 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.056860 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.057077 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.057343 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.057618 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: E0130 10:58:06.206801 4869 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.246:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f7d0fad20b5b8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 10:58:04.862477752 +0000 UTC m=+235.412353818,LastTimestamp:2026-01-30 10:58:04.862477752 +0000 UTC m=+235.412353818,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.285466 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.286045 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.286530 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.286899 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.287133 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.287386 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.287666 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.287973 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.378511 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-kubelet-dir\") pod \"1cc76d4c-23ec-4747-8441-727344a3ccff\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.378588 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cc76d4c-23ec-4747-8441-727344a3ccff-kube-api-access\") pod \"1cc76d4c-23ec-4747-8441-727344a3ccff\" (UID: 
\"1cc76d4c-23ec-4747-8441-727344a3ccff\") " Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.378642 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-var-lock\") pod \"1cc76d4c-23ec-4747-8441-727344a3ccff\" (UID: \"1cc76d4c-23ec-4747-8441-727344a3ccff\") " Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.378667 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1cc76d4c-23ec-4747-8441-727344a3ccff" (UID: "1cc76d4c-23ec-4747-8441-727344a3ccff"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.378807 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-var-lock" (OuterVolumeSpecName: "var-lock") pod "1cc76d4c-23ec-4747-8441-727344a3ccff" (UID: "1cc76d4c-23ec-4747-8441-727344a3ccff"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.379003 4869 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.379028 4869 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1cc76d4c-23ec-4747-8441-727344a3ccff-var-lock\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.385879 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cc76d4c-23ec-4747-8441-727344a3ccff-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1cc76d4c-23ec-4747-8441-727344a3ccff" (UID: "1cc76d4c-23ec-4747-8441-727344a3ccff"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.480307 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cc76d4c-23ec-4747-8441-727344a3ccff-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.864838 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.865942 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.866749 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.867400 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.867897 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.868179 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.868504 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.868852 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.869134 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.869393 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985578 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" 
(UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985616 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985648 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985692 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985796 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985847 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985968 4869 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985983 4869 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:06 crc kubenswrapper[4869]: I0130 10:58:06.985992 4869 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.068343 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/1.log" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.069006 4869 scope.go:117] "RemoveContainer" containerID="c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.069234 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-d68sd_openshift-marketplace(98cb9d90-57ea-4bf2-8ee4-dbcf18e79293)\"" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.069316 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.069813 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.070266 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.070567 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.070956 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.071237 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.071295 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.071518 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.071976 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.072129 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d" exitCode=0 Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.072223 4869 scope.go:117] "RemoveContainer" containerID="d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.072245 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.074207 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.074201 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1cc76d4c-23ec-4747-8441-727344a3ccff","Type":"ContainerDied","Data":"548ce00d38000e5522e9121092fea1c37cef8af4989a2da156a9908f0224146c"} Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.074364 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="548ce00d38000e5522e9121092fea1c37cef8af4989a2da156a9908f0224146c" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.074759 4869 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.246:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.087071 4869 scope.go:117] "RemoveContainer" containerID="9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.091409 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.091959 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.092205 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.092444 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.092655 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.092926 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: 
I0130 10:58:07.093238 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.093683 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.094036 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.094283 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.094590 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.094856 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.095134 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.095434 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.095656 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: 
connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.095907 4869 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.100384 4869 scope.go:117] "RemoveContainer" containerID="3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.111271 4869 scope.go:117] "RemoveContainer" containerID="0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.125857 4869 scope.go:117] "RemoveContainer" containerID="c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.140330 4869 scope.go:117] "RemoveContainer" containerID="19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.158997 4869 scope.go:117] "RemoveContainer" containerID="d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.159698 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\": container with ID starting with d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7 not found: ID does not exist" containerID="d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.159784 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7"} err="failed to get container status \"d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\": rpc error: code = NotFound desc = could not find container \"d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7\": container with ID starting with d6021684c7c2a5f4c624bf0e05f963d652080c7dd8c13835a1b4a81a32e226e7 not found: ID does not exist" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.159812 4869 scope.go:117] "RemoveContainer" containerID="9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.160039 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\": container with ID starting with 9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75 not found: ID does not exist" containerID="9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.160067 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75"} err="failed to get container status \"9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\": rpc error: code = NotFound desc = could not find container \"9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75\": container with ID starting with 
9da430e1dc76d89b585c9016cadd4f30cbf1153685732266e418c63c576bbd75 not found: ID does not exist" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.160087 4869 scope.go:117] "RemoveContainer" containerID="3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.160385 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\": container with ID starting with 3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e not found: ID does not exist" containerID="3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.160430 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e"} err="failed to get container status \"3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\": rpc error: code = NotFound desc = could not find container \"3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e\": container with ID starting with 3c988aa0f6ae88c537ad69d664ebcfad491fdc01747aec10eba27041772a715e not found: ID does not exist" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.160460 4869 scope.go:117] "RemoveContainer" containerID="0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.160966 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\": container with ID starting with 0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee not found: ID does not exist" containerID="0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.160990 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee"} err="failed to get container status \"0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\": rpc error: code = NotFound desc = could not find container \"0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee\": container with ID starting with 0815e0f3a32e39e741cba13bf0aa7403c7fbf6c47871e26a9b04de4df526aeee not found: ID does not exist" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.161003 4869 scope.go:117] "RemoveContainer" containerID="c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.161319 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\": container with ID starting with c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d not found: ID does not exist" containerID="c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.161348 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d"} err="failed to get container status \"c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\": rpc 
error: code = NotFound desc = could not find container \"c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d\": container with ID starting with c128ae9f73f6fafa1136d27a30e6712f6651bacdf7998d18acdf8013b8e9da3d not found: ID does not exist" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.161387 4869 scope.go:117] "RemoveContainer" containerID="19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811" Jan 30 10:58:07 crc kubenswrapper[4869]: E0130 10:58:07.161736 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\": container with ID starting with 19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811 not found: ID does not exist" containerID="19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811" Jan 30 10:58:07 crc kubenswrapper[4869]: I0130 10:58:07.161761 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811"} err="failed to get container status \"19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\": rpc error: code = NotFound desc = could not find container \"19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811\": container with ID starting with 19c74fbefcb4cb3fec1c3db971884231b3acd7519a51e924f8be09a88cc73811 not found: ID does not exist" Jan 30 10:58:08 crc kubenswrapper[4869]: I0130 10:58:08.142101 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.137365 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.139678 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.140043 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.140369 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.140626 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.140880 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.142129 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.149584 4869 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.246:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" volumeName="registry-storage" Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.180395 4869 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.184757 4869 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.185242 4869 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.185425 4869 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.185634 4869 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" Jan 30 10:58:10 crc kubenswrapper[4869]: I0130 10:58:10.185657 4869 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.185832 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" interval="200ms" Jan 30 
10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.386958 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" interval="400ms"
Jan 30 10:58:10 crc kubenswrapper[4869]: E0130 10:58:10.788383 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" interval="800ms"
Jan 30 10:58:11 crc kubenswrapper[4869]: E0130 10:58:11.589476 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" interval="1.6s"
Jan 30 10:58:13 crc kubenswrapper[4869]: E0130 10:58:13.190886 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" interval="3.2s"
Jan 30 10:58:14 crc kubenswrapper[4869]: I0130 10:58:14.026788 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:14 crc kubenswrapper[4869]: I0130 10:58:14.026914 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:14 crc kubenswrapper[4869]: I0130 10:58:14.027334 4869 scope.go:117] "RemoveContainer" containerID="c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95"
Jan 30 10:58:14 crc kubenswrapper[4869]: E0130 10:58:14.027520 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-d68sd_openshift-marketplace(98cb9d90-57ea-4bf2-8ee4-dbcf18e79293)\"" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293"
Jan 30 10:58:14 crc kubenswrapper[4869]: I0130 10:58:14.117503 4869 scope.go:117] "RemoveContainer" containerID="c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95"
Jan 30 10:58:14 crc kubenswrapper[4869]: E0130 10:58:14.117726 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-d68sd_openshift-marketplace(98cb9d90-57ea-4bf2-8ee4-dbcf18e79293)\"" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293"
Jan 30 10:58:16 crc kubenswrapper[4869]: E0130 10:58:16.207907 4869 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.246:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f7d0fad20b5b8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 10:58:04.862477752 +0000 UTC m=+235.412353818,LastTimestamp:2026-01-30 10:58:04.862477752 +0000 UTC m=+235.412353818,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 30 10:58:16 crc kubenswrapper[4869]: E0130 10:58:16.392069 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.246:6443: connect: connection refused" interval="6.4s"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.132960 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.133944 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.134266 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.134669 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.135785 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.136105 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.136420 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.136863 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.149899 4869 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.149949 4869 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:18 crc kubenswrapper[4869]: E0130 10:58:18.150445 4869 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:18 crc kubenswrapper[4869]: I0130 10:58:18.151013 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.147426 4869 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="b01481a69c4b3cb81d9dadd4cce7dbc7c2280d766065b20668dcab98cf5d9e67" exitCode=0
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.147532 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"b01481a69c4b3cb81d9dadd4cce7dbc7c2280d766065b20668dcab98cf5d9e67"}
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.147978 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ee6dd77655d2d7ae1c5a78e7096dc5b033fc9ca230681ad73b269753a0b0f0c0"}
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.148535 4869 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.148572 4869 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.149094 4869 status_manager.go:851] "Failed to get status for pod" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" pod="openshift-marketplace/redhat-marketplace-ht8ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ht8ck\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:19 crc kubenswrapper[4869]: E0130 10:58:19.149390 4869 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.246:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.150244 4869 status_manager.go:851] "Failed to get status for pod" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" pod="openshift-marketplace/redhat-operators-dhxch" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dhxch\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.150578 4869 status_manager.go:851] "Failed to get status for pod" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" pod="openshift-marketplace/community-operators-gcxdk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gcxdk\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.151180 4869 status_manager.go:851] "Failed to get status for pod" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" pod="openshift-marketplace/certified-operators-m7nxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-m7nxw\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.151631 4869 status_manager.go:851] "Failed to get status for pod" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.152278 4869 status_manager.go:851] "Failed to get status for pod" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-d68sd\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:19 crc kubenswrapper[4869]: I0130 10:58:19.152826 4869 status_manager.go:851] "Failed to get status for pod" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" pod="openshift-marketplace/marketplace-operator-79b997595-rkwmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-rkwmf\": dial tcp 38.102.83.246:6443: connect: connection refused"
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.162107 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"23150ab21e52ec286bcb21d210b8381edf9ac9b301089c4070a1588c9359fa0e"}
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.162403 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"24a49b7d6928d88f9631c1c510cdee39c8dcab36890e91b8fd25bddeaf7c38fd"}
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.162416 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"baa0193424259d71540c94d64b86680d6797eddb0965bfe178c57e64017a6518"}
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.162425 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4a7ec9936d477ca80f9cd271d687f42066de4904b3341e02736f2b30c02492f7"}
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.167580 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.167631 4869 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046" exitCode=1
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.167658 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046"}
Jan 30 10:58:20 crc kubenswrapper[4869]: I0130 10:58:20.168025 4869 scope.go:117] "RemoveContainer" containerID="edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046"
Jan 30 10:58:21 crc kubenswrapper[4869]: I0130 10:58:21.176087 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d6e67faf3bea33f8685a92f4f3d18635b31cf480b5e398a1e2a1d6ccf4fde31b"}
Jan 30 10:58:21 crc kubenswrapper[4869]: I0130 10:58:21.176861 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:21 crc kubenswrapper[4869]: I0130 10:58:21.176386 4869 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:21 crc kubenswrapper[4869]: I0130 10:58:21.177012 4869 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:21 crc kubenswrapper[4869]: I0130 10:58:21.179562 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 30 10:58:21 crc kubenswrapper[4869]: I0130 10:58:21.179688 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e95e1376be3faeb427fe924d8ceb47ba7321eef65ba113ec32f83aa00481ac48"}
Jan 30 10:58:22 crc kubenswrapper[4869]: I0130 10:58:22.674671 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 10:58:22 crc kubenswrapper[4869]: I0130 10:58:22.674868 4869 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Jan 30 10:58:22 crc kubenswrapper[4869]: I0130 10:58:22.675074 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Jan 30 10:58:23 crc kubenswrapper[4869]: I0130 10:58:23.152205 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:23 crc kubenswrapper[4869]: I0130 10:58:23.152257 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:23 crc kubenswrapper[4869]: I0130 10:58:23.157563 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:25 crc kubenswrapper[4869]: I0130 10:58:25.132546 4869 scope.go:117] "RemoveContainer" containerID="c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.186458 4869 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.204304 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/2.log"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.204788 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/1.log"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.204824 4869 generic.go:334] "Generic (PLEG): container finished" podID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" containerID="faa80c979506dc3865b450c5d325ab4f92b698536a3b5822adeeb53bfdbc40c7" exitCode=1
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.204917 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" event={"ID":"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293","Type":"ContainerDied","Data":"faa80c979506dc3865b450c5d325ab4f92b698536a3b5822adeeb53bfdbc40c7"}
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.204989 4869 scope.go:117] "RemoveContainer" containerID="c103567a0217635df6d484a7f9d68bb3cd9882c81b538fdccdaa52d7fc332f95"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.205200 4869 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.205220 4869 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.205697 4869 scope.go:117] "RemoveContainer" containerID="faa80c979506dc3865b450c5d325ab4f92b698536a3b5822adeeb53bfdbc40c7"
Jan 30 10:58:26 crc kubenswrapper[4869]: E0130 10:58:26.205948 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-d68sd_openshift-marketplace(98cb9d90-57ea-4bf2-8ee4-dbcf18e79293)\"" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.211659 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.240327 4869 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="4fce9326-0030-4225-809e-c970e243e640"
Jan 30 10:58:26 crc kubenswrapper[4869]: I0130 10:58:26.852076 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 10:58:27 crc kubenswrapper[4869]: I0130 10:58:27.212616 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/2.log"
Jan 30 10:58:27 crc kubenswrapper[4869]: I0130 10:58:27.213304 4869 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:27 crc kubenswrapper[4869]: I0130 10:58:27.213339 4869 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf30ffd7-2194-421d-baa2-f27e0dc8445e"
Jan 30 10:58:30 crc kubenswrapper[4869]: I0130 10:58:30.147104 4869 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="4fce9326-0030-4225-809e-c970e243e640"
Jan 30 10:58:32 crc kubenswrapper[4869]: I0130 10:58:32.675278 4869 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Jan 30 10:58:32 crc kubenswrapper[4869]: I0130 10:58:32.675829 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Jan 30 10:58:34 crc kubenswrapper[4869]: I0130 10:58:34.026636 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:34 crc kubenswrapper[4869]: I0130 10:58:34.027083 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:34 crc kubenswrapper[4869]: I0130 10:58:34.027560 4869 scope.go:117] "RemoveContainer" containerID="faa80c979506dc3865b450c5d325ab4f92b698536a3b5822adeeb53bfdbc40c7"
Jan 30 10:58:34 crc kubenswrapper[4869]: E0130 10:58:34.027846 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-d68sd_openshift-marketplace(98cb9d90-57ea-4bf2-8ee4-dbcf18e79293)\"" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293"
Jan 30 10:58:35 crc kubenswrapper[4869]: I0130 10:58:35.593240 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 30 10:58:35 crc kubenswrapper[4869]: I0130 10:58:35.593318 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Jan 30 10:58:35 crc kubenswrapper[4869]: I0130 10:58:35.932063 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 30 10:58:36 crc kubenswrapper[4869]: I0130 10:58:36.217111 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.088889 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.183409 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.229346 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.457485 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.525922 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.541101 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.591761 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.595687 4869 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.599863 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dhxch","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/redhat-marketplace-ht8ck","openshift-marketplace/marketplace-operator-79b997595-rkwmf","openshift-marketplace/certified-operators-m7nxw","openshift-marketplace/community-operators-gcxdk"]
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.599955 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.604019 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.638587 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=11.638570238 podStartE2EDuration="11.638570238s" podCreationTimestamp="2026-01-30 10:58:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:58:37.622002112 +0000 UTC m=+268.171878178" watchObservedRunningTime="2026-01-30 10:58:37.638570238 +0000 UTC m=+268.188446304"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.703938 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.746667 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.754411 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.812325 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.837315 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.899432 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 30 10:58:37 crc kubenswrapper[4869]: I0130 10:58:37.931300 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.011345 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.066022 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.139405 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="112aad1f-e2f9-41e8-a9c8-1d3b3297528e" path="/var/lib/kubelet/pods/112aad1f-e2f9-41e8-a9c8-1d3b3297528e/volumes"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.140141 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18c7bbb6-bdf6-4e26-9670-49a30b7dab22" path="/var/lib/kubelet/pods/18c7bbb6-bdf6-4e26-9670-49a30b7dab22/volumes"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.140811 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2db1788d-f11c-49f6-b613-d3ed750c8d8a" path="/var/lib/kubelet/pods/2db1788d-f11c-49f6-b613-d3ed750c8d8a/volumes"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.141979 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44c5913f-a9a0-4b9f-aa60-c6158d19a38a" path="/var/lib/kubelet/pods/44c5913f-a9a0-4b9f-aa60-c6158d19a38a/volumes"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.142464 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d28fe085-7a0e-4de6-8579-88e9583b87a3" path="/var/lib/kubelet/pods/d28fe085-7a0e-4de6-8579-88e9583b87a3/volumes"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.190275 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.219693 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.300599 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.390951 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.419754 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.529563 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.593427 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.697928 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.748191 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.774126 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.845219 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Jan 30 10:58:38 crc kubenswrapper[4869]: I0130 10:58:38.882220 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.051901 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.093633 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.167789 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.186972 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.346448 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.391930 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.434037 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.517488 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.664074 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.865446 4869 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.900503 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 30 10:58:39 crc kubenswrapper[4869]: I0130 10:58:39.937414 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.012979 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.020268 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.022242 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.045412 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.190146 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.210605 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.216765 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.279476 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.309017 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.334107 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.423531 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.452101 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.666528 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.716556 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.719124 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.759841 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.814450 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.836338 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.843174 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 30 10:58:40 crc kubenswrapper[4869]: I0130 10:58:40.910852 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.333068 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.370855 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.385437 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.583696 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.623061 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.686961 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.728255 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.812661 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.873320 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.879190 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.880414 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.886631 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.925200 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.954614 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 30 10:58:41 crc kubenswrapper[4869]: I0130 10:58:41.999794 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.020377 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.250177 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.291364 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.348894 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.414240 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.456389 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.460857 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.592598 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.603430 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.674983 4869 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.675050 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.675101 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.676053 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"e95e1376be3faeb427fe924d8ceb47ba7321eef65ba113ec32f83aa00481ac48"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.676157 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://e95e1376be3faeb427fe924d8ceb47ba7321eef65ba113ec32f83aa00481ac48" gracePeriod=30
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.776924 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.860637 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.875818 4869 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.910029 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.911834 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.928125 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Jan 30 10:58:42 crc kubenswrapper[4869]: I0130 10:58:42.958452 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.012922 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.164736 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.211382 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.235413 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.249148 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.268654 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.283910 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.287838 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.303671 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.383889 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.487632 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.684973 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.689363 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.701129 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.728327 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.734745 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.744877 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.829028 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.842279 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 30 10:58:43 crc kubenswrapper[4869]: I0130 10:58:43.992236 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.119052 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.222273 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.317470 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.321332 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.344196 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.352855 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.398307 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.467635 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.506851 4869 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.666010 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.674606 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.678629 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.712259 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.831300 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.872544 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 30 10:58:44 crc kubenswrapper[4869]: I0130 10:58:44.954718 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.013342 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.184093 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.254171 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.256824 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.257672 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.306063 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.347515 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.402284 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.420953 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.436369 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.606382 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.606477 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.699282 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.728659 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.739953 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.900290 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.917885 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.933835 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 30 10:58:45 crc kubenswrapper[4869]: I0130 10:58:45.987514 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.019926 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.029148 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.089306 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.102779 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.114271 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.149589 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.166132 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.183726 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.275309 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.302509 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.358477 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.381334 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.481759 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.548965 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.564580 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.574568 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.618243 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.672941 4869 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.694962 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.722421 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.878741 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Jan 30 10:58:46 crc kubenswrapper[4869]: I0130 10:58:46.950184 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.092680 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.105352 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.113966 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.171942 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.279841 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.343418 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.354453 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.377781 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.396491 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.407088 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.407968 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.471972 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.479304 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.566154 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.584134 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.629654 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.775750 4869 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.776230 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.776354 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.776468 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.810404 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.961058 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.964981 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 30 10:58:47 crc kubenswrapper[4869]: I0130 10:58:47.985819 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.064780 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.069538 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.074131 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.134550 4869 scope.go:117] "RemoveContainer" containerID="faa80c979506dc3865b450c5d325ab4f92b698536a3b5822adeeb53bfdbc40c7"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.150668 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.237063 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.253065 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.305133 4869 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.305387 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://81e79c074563928047167b1b45e770719f4e000bb1dbb002f0fae68dc2d5a851" gracePeriod=5
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.306877 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.321017 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/2.log"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.321072 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" event={"ID":"98cb9d90-57ea-4bf2-8ee4-dbcf18e79293","Type":"ContainerStarted","Data":"ea3c3e3fa2d959c6dd05d9eee07a171f4bc0b8938acd5531a7fe857d777c333a"}
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.321398 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.322961 4869 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d68sd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body=
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.322997 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podUID="98cb9d90-57ea-4bf2-8ee4-dbcf18e79293" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.352922 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd" podStartSLOduration=45.352907797 podStartE2EDuration="45.352907797s" podCreationTimestamp="2026-01-30 10:58:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:58:48.34859129 +0000 UTC m=+278.898467356" watchObservedRunningTime="2026-01-30 10:58:48.352907797 +0000 UTC m=+278.902783863"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.569968 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.593052 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.624804 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.682224 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.724319 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.874939 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.883389 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 30 10:58:48 crc kubenswrapper[4869]: I0130 10:58:48.980282 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.038563 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.184321 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.328790 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-d68sd"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.364644 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.391093 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.471762 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.497455 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.498827 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.548773 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.560327 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.768125 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.826242 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.874942 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.905912 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.907073 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.950301 4869 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 30 10:58:49 crc kubenswrapper[4869]: I0130 10:58:49.969924 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.007703 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.278582 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.286907 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.326330 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.467007 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.629205 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.647501 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 30 10:58:50 crc kubenswrapper[4869]: I0130 10:58:50.779601 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.107910 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.187946 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.280801 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.325670 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.360074 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.429818 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.501380 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.971034 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 30 10:58:51 crc kubenswrapper[4869]: I0130 10:58:51.987915 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 30 10:58:52 crc kubenswrapper[4869]: I0130 10:58:52.386471 4869 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.345132 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.345414 4869 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="81e79c074563928047167b1b45e770719f4e000bb1dbb002f0fae68dc2d5a851" exitCode=137 Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.367473 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.687062 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.870006 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.870075 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961430 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961478 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961503 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961521 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961538 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961753 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
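
[editor's note] The startup-monitor container above was killed with gracePeriod=5 (10:58:48.305) and observed dead with exitCode=137 at 10:58:53.345, about five seconds later. 137 is the shell convention 128+signo for SIGKILL(9): the container did not exit within the grace period and was force-killed. Below is a minimal, hedged sketch of the same TERM-then-KILL escalation on Linux; the `sh -c 'trap "" TERM; sleep 300'` child is a stand-in for a container process that ignores SIGTERM, and the 5s timeout mirrors the logged grace period.

```go
// gracekill.go - a sketch (Linux) of SIGTERM-then-SIGKILL escalation,
// mirroring the kubelet's "Killing container with a grace period" entries
// above. Not kubelet code; just the signal arithmetic behind exitCode=137.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func main() {
	// Stand-in for a container process that ignores SIGTERM.
	cmd := exec.Command("sh", "-c", `trap "" TERM; sleep 300`)
	if err := cmd.Start(); err != nil {
		panic(err)
	}

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	cmd.Process.Signal(syscall.SIGTERM) // polite request first

	select {
	case <-done:
		// exited within the grace period
	case <-time.After(5 * time.Second): // gracePeriod=5, as in the log
		cmd.Process.Kill() // escalate to SIGKILL
		<-done
	}

	// Death-by-signal is reported as 128+signo; SIGKILL(9) -> 137,
	// exactly the exitCode=137 the PLEG records above.
	if ws, ok := cmd.ProcessState.Sys().(syscall.WaitStatus); ok && ws.Signaled() {
		fmt.Println("exit code:", 128+int(ws.Signal())) // prints 137
	}
}
```
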
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961785 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961815 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.961829 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:53 crc kubenswrapper[4869]: I0130 10:58:53.970885 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.062206 4869 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.062247 4869 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.062257 4869 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.062266 4869 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.062274 4869 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.139792 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.350895 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 
10:58:54.350975 4869 scope.go:117] "RemoveContainer" containerID="81e79c074563928047167b1b45e770719f4e000bb1dbb002f0fae68dc2d5a851" Jan 30 10:58:54 crc kubenswrapper[4869]: I0130 10:58:54.351030 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 10:59:09 crc kubenswrapper[4869]: I0130 10:59:09.934884 4869 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 30 10:59:12 crc kubenswrapper[4869]: E0130 10:59:12.790469 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-e95e1376be3faeb427fe924d8ceb47ba7321eef65ba113ec32f83aa00481ac48.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-conmon-e95e1376be3faeb427fe924d8ceb47ba7321eef65ba113ec32f83aa00481ac48.scope\": RecentStats: unable to find data in memory cache]" Jan 30 10:59:13 crc kubenswrapper[4869]: I0130 10:59:13.459168 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Jan 30 10:59:13 crc kubenswrapper[4869]: I0130 10:59:13.461061 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 30 10:59:13 crc kubenswrapper[4869]: I0130 10:59:13.461122 4869 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e95e1376be3faeb427fe924d8ceb47ba7321eef65ba113ec32f83aa00481ac48" exitCode=137 Jan 30 10:59:13 crc kubenswrapper[4869]: I0130 10:59:13.461152 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e95e1376be3faeb427fe924d8ceb47ba7321eef65ba113ec32f83aa00481ac48"} Jan 30 10:59:13 crc kubenswrapper[4869]: I0130 10:59:13.461178 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e6677ec6e5116075b91c479c4be6084a56a6c8b03b730cdf9ef150198efac9e1"} Jan 30 10:59:13 crc kubenswrapper[4869]: I0130 10:59:13.461194 4869 scope.go:117] "RemoveContainer" containerID="edce3110583cedc1683bf2777ae5f4dd8ac6ac1e3bc1971528bc66942da54046" Jan 30 10:59:14 crc kubenswrapper[4869]: I0130 10:59:14.467478 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.775947 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c9rvm"] Jan 30 10:59:15 crc kubenswrapper[4869]: E0130 10:59:15.776406 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" containerName="installer" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.776418 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" containerName="installer" Jan 30 10:59:15 crc kubenswrapper[4869]: E0130 10:59:15.776430 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.776435 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.776520 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.776533 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cc76d4c-23ec-4747-8441-727344a3ccff" containerName="installer" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.777214 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.779770 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.790959 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c9rvm"] Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.817861 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgr4x\" (UniqueName: \"kubernetes.io/projected/51e1e87e-a127-4a53-9395-a32a304f638c-kube-api-access-qgr4x\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.817915 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-utilities\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.817953 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-catalog-content\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.918658 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgr4x\" (UniqueName: \"kubernetes.io/projected/51e1e87e-a127-4a53-9395-a32a304f638c-kube-api-access-qgr4x\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.918741 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-utilities\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.918771 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-catalog-content\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.919255 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-catalog-content\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.919600 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-utilities\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.938895 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgr4x\" (UniqueName: \"kubernetes.io/projected/51e1e87e-a127-4a53-9395-a32a304f638c-kube-api-access-qgr4x\") pod \"redhat-operators-c9rvm\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.976392 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t99sp"] Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.978353 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.980292 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 30 10:59:15 crc kubenswrapper[4869]: I0130 10:59:15.982226 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t99sp"] Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.098605 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.121140 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9691505-782f-4384-ae50-7cd13749bfde-utilities\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.121219 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz27g\" (UniqueName: \"kubernetes.io/projected/e9691505-782f-4384-ae50-7cd13749bfde-kube-api-access-xz27g\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.121260 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9691505-782f-4384-ae50-7cd13749bfde-catalog-content\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.222068 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz27g\" (UniqueName: \"kubernetes.io/projected/e9691505-782f-4384-ae50-7cd13749bfde-kube-api-access-xz27g\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.222390 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9691505-782f-4384-ae50-7cd13749bfde-catalog-content\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.222432 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9691505-782f-4384-ae50-7cd13749bfde-utilities\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.223239 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9691505-782f-4384-ae50-7cd13749bfde-catalog-content\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.223434 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9691505-782f-4384-ae50-7cd13749bfde-utilities\") pod \"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.243175 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz27g\" (UniqueName: \"kubernetes.io/projected/e9691505-782f-4384-ae50-7cd13749bfde-kube-api-access-xz27g\") pod 
\"redhat-marketplace-t99sp\" (UID: \"e9691505-782f-4384-ae50-7cd13749bfde\") " pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.295629 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.469573 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t99sp"] Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.494530 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t99sp" event={"ID":"e9691505-782f-4384-ae50-7cd13749bfde","Type":"ContainerStarted","Data":"f20004b9e0666dec9e74c9401e84b52c0dda880195abe4a8b33d0d0a5636d64e"} Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.503976 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c9rvm"] Jan 30 10:59:16 crc kubenswrapper[4869]: W0130 10:59:16.507912 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51e1e87e_a127_4a53_9395_a32a304f638c.slice/crio-74f50934ef9da057e382954f1af04e6e599df9cf048ec78c967071a00b515a55 WatchSource:0}: Error finding container 74f50934ef9da057e382954f1af04e6e599df9cf048ec78c967071a00b515a55: Status 404 returned error can't find the container with id 74f50934ef9da057e382954f1af04e6e599df9cf048ec78c967071a00b515a55 Jan 30 10:59:16 crc kubenswrapper[4869]: I0130 10:59:16.852619 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 10:59:17 crc kubenswrapper[4869]: I0130 10:59:17.502230 4869 generic.go:334] "Generic (PLEG): container finished" podID="e9691505-782f-4384-ae50-7cd13749bfde" containerID="efaeda0d43c9b28dd2b090eb8792cff9f26809bb11bb68873ee9f4315345479e" exitCode=0 Jan 30 10:59:17 crc kubenswrapper[4869]: I0130 10:59:17.502298 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t99sp" event={"ID":"e9691505-782f-4384-ae50-7cd13749bfde","Type":"ContainerDied","Data":"efaeda0d43c9b28dd2b090eb8792cff9f26809bb11bb68873ee9f4315345479e"} Jan 30 10:59:17 crc kubenswrapper[4869]: I0130 10:59:17.507029 4869 generic.go:334] "Generic (PLEG): container finished" podID="51e1e87e-a127-4a53-9395-a32a304f638c" containerID="2964a733a289a747bf2abfe24efe236027989983f9ca2a89fed6f09ef2d8fbbb" exitCode=0 Jan 30 10:59:17 crc kubenswrapper[4869]: I0130 10:59:17.507063 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9rvm" event={"ID":"51e1e87e-a127-4a53-9395-a32a304f638c","Type":"ContainerDied","Data":"2964a733a289a747bf2abfe24efe236027989983f9ca2a89fed6f09ef2d8fbbb"} Jan 30 10:59:17 crc kubenswrapper[4869]: I0130 10:59:17.507094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9rvm" event={"ID":"51e1e87e-a127-4a53-9395-a32a304f638c","Type":"ContainerStarted","Data":"74f50934ef9da057e382954f1af04e6e599df9cf048ec78c967071a00b515a55"} Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.173625 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gkx59"] Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.177229 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.182613 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.189004 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gkx59"] Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.247452 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b96b64-9445-48fe-bd76-e1a23f647129-catalog-content\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.247533 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b96b64-9445-48fe-bd76-e1a23f647129-utilities\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.247609 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dctn\" (UniqueName: \"kubernetes.io/projected/b2b96b64-9445-48fe-bd76-e1a23f647129-kube-api-access-9dctn\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.349179 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b96b64-9445-48fe-bd76-e1a23f647129-catalog-content\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.349692 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b96b64-9445-48fe-bd76-e1a23f647129-utilities\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.349843 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b96b64-9445-48fe-bd76-e1a23f647129-catalog-content\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.349853 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dctn\" (UniqueName: \"kubernetes.io/projected/b2b96b64-9445-48fe-bd76-e1a23f647129-kube-api-access-9dctn\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.350187 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b96b64-9445-48fe-bd76-e1a23f647129-utilities\") pod \"certified-operators-gkx59\" (UID: 
\"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.377562 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dctn\" (UniqueName: \"kubernetes.io/projected/b2b96b64-9445-48fe-bd76-e1a23f647129-kube-api-access-9dctn\") pod \"certified-operators-gkx59\" (UID: \"b2b96b64-9445-48fe-bd76-e1a23f647129\") " pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.377811 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mf465"] Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.379208 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.382290 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.390017 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mf465"] Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.451084 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frpjs\" (UniqueName: \"kubernetes.io/projected/d787629b-df65-406a-8890-2a65d18ce8fd-kube-api-access-frpjs\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.451146 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d787629b-df65-406a-8890-2a65d18ce8fd-catalog-content\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.451206 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d787629b-df65-406a-8890-2a65d18ce8fd-utilities\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.503208 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.518886 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9rvm" event={"ID":"51e1e87e-a127-4a53-9395-a32a304f638c","Type":"ContainerStarted","Data":"4eb12e8adabe16b39869d8eb7420cb818bfee76072e95cad831ab4c559af0c8b"} Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.520381 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t99sp" event={"ID":"e9691505-782f-4384-ae50-7cd13749bfde","Type":"ContainerStarted","Data":"87a0bcb79b68983033d5c779dc6df8bf08e764827bbc53fbb02a26cf82faccb2"} Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.552587 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frpjs\" (UniqueName: \"kubernetes.io/projected/d787629b-df65-406a-8890-2a65d18ce8fd-kube-api-access-frpjs\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.552646 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d787629b-df65-406a-8890-2a65d18ce8fd-catalog-content\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.552724 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d787629b-df65-406a-8890-2a65d18ce8fd-utilities\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.553267 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d787629b-df65-406a-8890-2a65d18ce8fd-utilities\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.553743 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d787629b-df65-406a-8890-2a65d18ce8fd-catalog-content\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.572499 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frpjs\" (UniqueName: \"kubernetes.io/projected/d787629b-df65-406a-8890-2a65d18ce8fd-kube-api-access-frpjs\") pod \"community-operators-mf465\" (UID: \"d787629b-df65-406a-8890-2a65d18ce8fd\") " pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.689409 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gkx59"] Jan 30 10:59:18 crc kubenswrapper[4869]: I0130 10:59:18.716300 4869 util.go:30] "No sandbox for pod can be found. 
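
[editor's note] Every record in this artifact has the same two-layer shape: a journald prefix ("Jan 30 10:59:18 crc kubenswrapper[4869]: ") wrapping a klog header (severity letter I/W/E, MMDD hh:mm:ss.micros, PID, file:line) followed by a structured message. When mining this log, the two layers can be peeled apart as sketched below; the regexes are assumptions fitted to the lines above, not an official grammar.

```go
// klogparse.go - a sketch for mining entries out of this artifact: strip the
// journald prefix, then split the klog header into severity, timestamp,
// source location, and message.
package main

import (
	"fmt"
	"regexp"
)

var (
	journald = regexp.MustCompile(`^\w{3} \d{1,2} [\d:]{8} \S+ kubenswrapper\[\d+\]: `)
	klogHdr  = regexp.MustCompile(`^([IWE])(\d{4} \d{2}:\d{2}:\d{2}\.\d{6})\s+\d+\s+([\w./]+:\d+)\] (.*)$`)
)

func main() {
	// A line copied verbatim from this log.
	line := `Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.656811 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mf465"`

	rest := journald.ReplaceAllString(line, "")
	if m := klogHdr.FindStringSubmatch(rest); m != nil {
		fmt.Println("severity:", m[1]) // I / W / E
		fmt.Println("time:    ", m[2]) // 0130 10:59:19.656811
		fmt.Println("source:  ", m[3]) // kubelet.go:2542
		fmt.Println("message: ", m[4])
	}
}
```
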
Need to start a new one" pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.098696 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mf465"] Jan 30 10:59:19 crc kubenswrapper[4869]: W0130 10:59:19.103437 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd787629b_df65_406a_8890_2a65d18ce8fd.slice/crio-59c96e9b3166d7d52633ef5564a1b653984f4e91341931d8ba25c58506487263 WatchSource:0}: Error finding container 59c96e9b3166d7d52633ef5564a1b653984f4e91341931d8ba25c58506487263: Status 404 returned error can't find the container with id 59c96e9b3166d7d52633ef5564a1b653984f4e91341931d8ba25c58506487263 Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.532975 4869 generic.go:334] "Generic (PLEG): container finished" podID="b2b96b64-9445-48fe-bd76-e1a23f647129" containerID="1670700c387af61b7fee5b8fc3e91c4e87c92525747b207722b45b92852bb4fa" exitCode=0 Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.533024 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkx59" event={"ID":"b2b96b64-9445-48fe-bd76-e1a23f647129","Type":"ContainerDied","Data":"1670700c387af61b7fee5b8fc3e91c4e87c92525747b207722b45b92852bb4fa"} Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.533301 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkx59" event={"ID":"b2b96b64-9445-48fe-bd76-e1a23f647129","Type":"ContainerStarted","Data":"92c4d156359697dcd3a7a0f489194cf6cc56984ab72258065e5bf672069c294b"} Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.538546 4869 generic.go:334] "Generic (PLEG): container finished" podID="e9691505-782f-4384-ae50-7cd13749bfde" containerID="87a0bcb79b68983033d5c779dc6df8bf08e764827bbc53fbb02a26cf82faccb2" exitCode=0 Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.538625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t99sp" event={"ID":"e9691505-782f-4384-ae50-7cd13749bfde","Type":"ContainerDied","Data":"87a0bcb79b68983033d5c779dc6df8bf08e764827bbc53fbb02a26cf82faccb2"} Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.543363 4869 generic.go:334] "Generic (PLEG): container finished" podID="d787629b-df65-406a-8890-2a65d18ce8fd" containerID="a08e09cd2b31c27390229689245d58e5ca92b054e9e221015732e5e9b196c7aa" exitCode=0 Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.543442 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf465" event={"ID":"d787629b-df65-406a-8890-2a65d18ce8fd","Type":"ContainerDied","Data":"a08e09cd2b31c27390229689245d58e5ca92b054e9e221015732e5e9b196c7aa"} Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.543466 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf465" event={"ID":"d787629b-df65-406a-8890-2a65d18ce8fd","Type":"ContainerStarted","Data":"59c96e9b3166d7d52633ef5564a1b653984f4e91341931d8ba25c58506487263"} Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.545938 4869 generic.go:334] "Generic (PLEG): container finished" podID="51e1e87e-a127-4a53-9395-a32a304f638c" containerID="4eb12e8adabe16b39869d8eb7420cb818bfee76072e95cad831ab4c559af0c8b" exitCode=0 Jan 30 10:59:19 crc kubenswrapper[4869]: I0130 10:59:19.545959 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-c9rvm" event={"ID":"51e1e87e-a127-4a53-9395-a32a304f638c","Type":"ContainerDied","Data":"4eb12e8adabe16b39869d8eb7420cb818bfee76072e95cad831ab4c559af0c8b"} Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.556373 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t99sp" event={"ID":"e9691505-782f-4384-ae50-7cd13749bfde","Type":"ContainerStarted","Data":"2c7d105419f588651666906318b49e050c05842f881f76e055637e057cdd5894"} Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.561875 4869 generic.go:334] "Generic (PLEG): container finished" podID="d787629b-df65-406a-8890-2a65d18ce8fd" containerID="89ce3575e78f9de75cffd3894500c871aa3184234e56e172e376a8fa70896a2f" exitCode=0 Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.562101 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf465" event={"ID":"d787629b-df65-406a-8890-2a65d18ce8fd","Type":"ContainerDied","Data":"89ce3575e78f9de75cffd3894500c871aa3184234e56e172e376a8fa70896a2f"} Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.564228 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9rvm" event={"ID":"51e1e87e-a127-4a53-9395-a32a304f638c","Type":"ContainerStarted","Data":"0404094534ee99ad1d5cb91b2b42bc6f72fa21cddcda828d63db5bffcef4e1cb"} Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.565967 4869 generic.go:334] "Generic (PLEG): container finished" podID="b2b96b64-9445-48fe-bd76-e1a23f647129" containerID="87696e74aa91afdde0fe3cb81e5c22b2c19da1574f458c27b49a5447fc416507" exitCode=0 Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.566006 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkx59" event={"ID":"b2b96b64-9445-48fe-bd76-e1a23f647129","Type":"ContainerDied","Data":"87696e74aa91afdde0fe3cb81e5c22b2c19da1574f458c27b49a5447fc416507"} Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.579898 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t99sp" podStartSLOduration=3.345261912 podStartE2EDuration="6.579880363s" podCreationTimestamp="2026-01-30 10:59:15 +0000 UTC" firstStartedPulling="2026-01-30 10:59:17.504589804 +0000 UTC m=+308.054465860" lastFinishedPulling="2026-01-30 10:59:20.739208245 +0000 UTC m=+311.289084311" observedRunningTime="2026-01-30 10:59:21.577863664 +0000 UTC m=+312.127739740" watchObservedRunningTime="2026-01-30 10:59:21.579880363 +0000 UTC m=+312.129756429" Jan 30 10:59:21 crc kubenswrapper[4869]: I0130 10:59:21.662147 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c9rvm" podStartSLOduration=3.4444579 podStartE2EDuration="6.662131544s" podCreationTimestamp="2026-01-30 10:59:15 +0000 UTC" firstStartedPulling="2026-01-30 10:59:17.508837938 +0000 UTC m=+308.058714004" lastFinishedPulling="2026-01-30 10:59:20.726511582 +0000 UTC m=+311.276387648" observedRunningTime="2026-01-30 10:59:21.660757414 +0000 UTC m=+312.210633500" watchObservedRunningTime="2026-01-30 10:59:21.662131544 +0000 UTC m=+312.212007610" Jan 30 10:59:22 crc kubenswrapper[4869]: I0130 10:59:22.574195 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf465" 
event={"ID":"d787629b-df65-406a-8890-2a65d18ce8fd","Type":"ContainerStarted","Data":"5df58c7873d8ba51751b4440a766d0d63747c38e123650bd77cb5cc32ae50c50"} Jan 30 10:59:22 crc kubenswrapper[4869]: I0130 10:59:22.577031 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkx59" event={"ID":"b2b96b64-9445-48fe-bd76-e1a23f647129","Type":"ContainerStarted","Data":"b86d438bfa4d4aff9184dffb36982443152cc0466e0abddfc7c22e843e791240"} Jan 30 10:59:22 crc kubenswrapper[4869]: I0130 10:59:22.594850 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mf465" podStartSLOduration=2.170650034 podStartE2EDuration="4.594813411s" podCreationTimestamp="2026-01-30 10:59:18 +0000 UTC" firstStartedPulling="2026-01-30 10:59:19.544762983 +0000 UTC m=+310.094639049" lastFinishedPulling="2026-01-30 10:59:21.96892636 +0000 UTC m=+312.518802426" observedRunningTime="2026-01-30 10:59:22.592452572 +0000 UTC m=+313.142328638" watchObservedRunningTime="2026-01-30 10:59:22.594813411 +0000 UTC m=+313.144689497" Jan 30 10:59:22 crc kubenswrapper[4869]: I0130 10:59:22.611591 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gkx59" podStartSLOduration=2.104822423 podStartE2EDuration="4.611560602s" podCreationTimestamp="2026-01-30 10:59:18 +0000 UTC" firstStartedPulling="2026-01-30 10:59:19.534375898 +0000 UTC m=+310.084251964" lastFinishedPulling="2026-01-30 10:59:22.041114077 +0000 UTC m=+312.590990143" observedRunningTime="2026-01-30 10:59:22.609829512 +0000 UTC m=+313.159705588" watchObservedRunningTime="2026-01-30 10:59:22.611560602 +0000 UTC m=+313.161436688" Jan 30 10:59:22 crc kubenswrapper[4869]: I0130 10:59:22.674981 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 10:59:22 crc kubenswrapper[4869]: I0130 10:59:22.678933 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 10:59:23 crc kubenswrapper[4869]: I0130 10:59:23.592064 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 10:59:26 crc kubenswrapper[4869]: I0130 10:59:26.099985 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:26 crc kubenswrapper[4869]: I0130 10:59:26.100280 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:26 crc kubenswrapper[4869]: I0130 10:59:26.296597 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:26 crc kubenswrapper[4869]: I0130 10:59:26.296639 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:26 crc kubenswrapper[4869]: I0130 10:59:26.337624 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:26 crc kubenswrapper[4869]: I0130 10:59:26.633196 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t99sp" Jan 30 10:59:27 crc kubenswrapper[4869]: I0130 10:59:27.140294 4869 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c9rvm" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="registry-server" probeResult="failure" output=< Jan 30 10:59:27 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Jan 30 10:59:27 crc kubenswrapper[4869]: > Jan 30 10:59:28 crc kubenswrapper[4869]: I0130 10:59:28.504426 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:28 crc kubenswrapper[4869]: I0130 10:59:28.504685 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:28 crc kubenswrapper[4869]: I0130 10:59:28.548425 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:28 crc kubenswrapper[4869]: I0130 10:59:28.638137 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gkx59" Jan 30 10:59:28 crc kubenswrapper[4869]: I0130 10:59:28.716840 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:28 crc kubenswrapper[4869]: I0130 10:59:28.717422 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:28 crc kubenswrapper[4869]: I0130 10:59:28.758671 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:29 crc kubenswrapper[4869]: I0130 10:59:29.656811 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mf465" Jan 30 10:59:29 crc kubenswrapper[4869]: I0130 10:59:29.805177 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"] Jan 30 10:59:29 crc kubenswrapper[4869]: I0130 10:59:29.805440 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" podUID="3de467a9-b50e-4af7-816d-c346960a39af" containerName="route-controller-manager" containerID="cri-o://e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068" gracePeriod=30 Jan 30 10:59:29 crc kubenswrapper[4869]: I0130 10:59:29.812561 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pr488"] Jan 30 10:59:29 crc kubenswrapper[4869]: I0130 10:59:29.812787 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" podUID="e9ed3410-fb43-440e-8d7f-832850050d0c" containerName="controller-manager" containerID="cri-o://2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d" gracePeriod=30 Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.187723 4869 util.go:48] "No ready sandbox for pod can be found. 
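
[editor's note] Two probe failures appear in this log: the marketplace-operator HTTP readiness probe earlier (GET http://10.217.0.57:8080/healthz, connection refused) and the registry-server startup probe just above, whose output `timeout: failed to connect service ":50051" within 1s` only records the port, not the pod IP. A sketch of the two checks from the outside follows; the real probes are executed by the kubelet and a gRPC health-check binary, not by code like this, and ":50051" dials the local host since the log omits the address.

```go
// probecheck.go - a sketch of the two probe styles failing above: a TCP
// connect within 1s (startup probe on :50051) and an HTTP GET of /healthz
// (readiness probe). Addresses are taken from the log where available.
package main

import (
	"fmt"
	"net"
	"net/http"
	"time"
)

func main() {
	// Startup-probe style: can we connect to :50051 within 1s?
	if conn, err := net.DialTimeout("tcp", ":50051", 1*time.Second); err != nil {
		fmt.Println("startup probe failure:", err) // cf. `failed to connect service ":50051" within 1s`
	} else {
		conn.Close()
	}

	// Readiness-probe style: a 2xx from /healthz means ready.
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get("http://10.217.0.57:8080/healthz")
	if err != nil {
		fmt.Println("readiness probe failure:", err) // cf. "connect: connection refused"
		return
	}
	resp.Body.Close()
	fmt.Println("readiness probe status:", resp.StatusCode)
}
```
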
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.289497 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pbmn\" (UniqueName: \"kubernetes.io/projected/e9ed3410-fb43-440e-8d7f-832850050d0c-kube-api-access-5pbmn\") pod \"e9ed3410-fb43-440e-8d7f-832850050d0c\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.289536 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9ed3410-fb43-440e-8d7f-832850050d0c-serving-cert\") pod \"e9ed3410-fb43-440e-8d7f-832850050d0c\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.289579 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-config\") pod \"e9ed3410-fb43-440e-8d7f-832850050d0c\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.289654 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-client-ca\") pod \"e9ed3410-fb43-440e-8d7f-832850050d0c\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.289717 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-proxy-ca-bundles\") pod \"e9ed3410-fb43-440e-8d7f-832850050d0c\" (UID: \"e9ed3410-fb43-440e-8d7f-832850050d0c\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.290596 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-client-ca" (OuterVolumeSpecName: "client-ca") pod "e9ed3410-fb43-440e-8d7f-832850050d0c" (UID: "e9ed3410-fb43-440e-8d7f-832850050d0c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.290634 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-config" (OuterVolumeSpecName: "config") pod "e9ed3410-fb43-440e-8d7f-832850050d0c" (UID: "e9ed3410-fb43-440e-8d7f-832850050d0c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.290911 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e9ed3410-fb43-440e-8d7f-832850050d0c" (UID: "e9ed3410-fb43-440e-8d7f-832850050d0c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.296031 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9ed3410-fb43-440e-8d7f-832850050d0c-kube-api-access-5pbmn" (OuterVolumeSpecName: "kube-api-access-5pbmn") pod "e9ed3410-fb43-440e-8d7f-832850050d0c" (UID: "e9ed3410-fb43-440e-8d7f-832850050d0c"). InnerVolumeSpecName "kube-api-access-5pbmn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.296075 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9ed3410-fb43-440e-8d7f-832850050d0c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e9ed3410-fb43-440e-8d7f-832850050d0c" (UID: "e9ed3410-fb43-440e-8d7f-832850050d0c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.297145 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.391118 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.391150 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pbmn\" (UniqueName: \"kubernetes.io/projected/e9ed3410-fb43-440e-8d7f-832850050d0c-kube-api-access-5pbmn\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.391160 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9ed3410-fb43-440e-8d7f-832850050d0c-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.391169 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.391179 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9ed3410-fb43-440e-8d7f-832850050d0c-client-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.492358 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3de467a9-b50e-4af7-816d-c346960a39af-serving-cert\") pod \"3de467a9-b50e-4af7-816d-c346960a39af\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.492403 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-client-ca\") pod \"3de467a9-b50e-4af7-816d-c346960a39af\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.492444 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-config\") pod \"3de467a9-b50e-4af7-816d-c346960a39af\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.492498 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd5xr\" (UniqueName: \"kubernetes.io/projected/3de467a9-b50e-4af7-816d-c346960a39af-kube-api-access-pd5xr\") pod \"3de467a9-b50e-4af7-816d-c346960a39af\" (UID: \"3de467a9-b50e-4af7-816d-c346960a39af\") "
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.493407 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-client-ca" (OuterVolumeSpecName: "client-ca") pod "3de467a9-b50e-4af7-816d-c346960a39af" (UID: "3de467a9-b50e-4af7-816d-c346960a39af"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.493977 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-config" (OuterVolumeSpecName: "config") pod "3de467a9-b50e-4af7-816d-c346960a39af" (UID: "3de467a9-b50e-4af7-816d-c346960a39af"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.495889 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3de467a9-b50e-4af7-816d-c346960a39af-kube-api-access-pd5xr" (OuterVolumeSpecName: "kube-api-access-pd5xr") pod "3de467a9-b50e-4af7-816d-c346960a39af" (UID: "3de467a9-b50e-4af7-816d-c346960a39af"). InnerVolumeSpecName "kube-api-access-pd5xr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.496726 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de467a9-b50e-4af7-816d-c346960a39af-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3de467a9-b50e-4af7-816d-c346960a39af" (UID: "3de467a9-b50e-4af7-816d-c346960a39af"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.593433 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3de467a9-b50e-4af7-816d-c346960a39af-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.593477 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-client-ca\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.593489 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3de467a9-b50e-4af7-816d-c346960a39af-config\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.593503 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd5xr\" (UniqueName: \"kubernetes.io/projected/3de467a9-b50e-4af7-816d-c346960a39af-kube-api-access-pd5xr\") on node \"crc\" DevicePath \"\""
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.615951 4869 generic.go:334] "Generic (PLEG): container finished" podID="e9ed3410-fb43-440e-8d7f-832850050d0c" containerID="2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d" exitCode=0
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.616019 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.616026 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" event={"ID":"e9ed3410-fb43-440e-8d7f-832850050d0c","Type":"ContainerDied","Data":"2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d"}
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.616164 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pr488" event={"ID":"e9ed3410-fb43-440e-8d7f-832850050d0c","Type":"ContainerDied","Data":"9ebfcef35d521001a4d6519fe42b8d659210bfe75a69e34bed7b215d9b87a094"}
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.616190 4869 scope.go:117] "RemoveContainer" containerID="2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.618238 4869 generic.go:334] "Generic (PLEG): container finished" podID="3de467a9-b50e-4af7-816d-c346960a39af" containerID="e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068" exitCode=0
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.618805 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.618863 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" event={"ID":"3de467a9-b50e-4af7-816d-c346960a39af","Type":"ContainerDied","Data":"e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068"}
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.618913 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq" event={"ID":"3de467a9-b50e-4af7-816d-c346960a39af","Type":"ContainerDied","Data":"6732c8b176d0093e8ae8e04ac73613e947096ece699e2e95f9ecfd2c913cb614"}
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.634574 4869 scope.go:117] "RemoveContainer" containerID="2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d"
Jan 30 10:59:30 crc kubenswrapper[4869]: E0130 10:59:30.634948 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d\": container with ID starting with 2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d not found: ID does not exist" containerID="2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.634980 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d"} err="failed to get container status \"2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d\": rpc error: code = NotFound desc = could not find container \"2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d\": container with ID starting with 2f410cfb2e66d44ec332f4b004bf979a07ff726899e4fe564d553c777d70d03d not found: ID does not exist"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.635000 4869 scope.go:117] "RemoveContainer" containerID="e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.647155 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pr488"]
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.648815 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pr488"]
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.655765 4869 scope.go:117] "RemoveContainer" containerID="e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068"
Jan 30 10:59:30 crc kubenswrapper[4869]: E0130 10:59:30.656166 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068\": container with ID starting with e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068 not found: ID does not exist" containerID="e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.656196 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068"} err="failed to get container status \"e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068\": rpc error: code = NotFound desc = could not find container \"e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068\": container with ID starting with e55ee18451d7c591a630fde5b90ccddd9b1f5d46a1447b2d971b5590c350f068 not found: ID does not exist"
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.658942 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"]
Jan 30 10:59:30 crc kubenswrapper[4869]: I0130 10:59:30.662339 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqgwq"]
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.808782 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"]
Jan 30 10:59:31 crc kubenswrapper[4869]: E0130 10:59:31.809208 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9ed3410-fb43-440e-8d7f-832850050d0c" containerName="controller-manager"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.809228 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9ed3410-fb43-440e-8d7f-832850050d0c" containerName="controller-manager"
Jan 30 10:59:31 crc kubenswrapper[4869]: E0130 10:59:31.809243 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3de467a9-b50e-4af7-816d-c346960a39af" containerName="route-controller-manager"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.809250 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3de467a9-b50e-4af7-816d-c346960a39af" containerName="route-controller-manager"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.809435 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3de467a9-b50e-4af7-816d-c346960a39af" containerName="route-controller-manager"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.809446 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9ed3410-fb43-440e-8d7f-832850050d0c" containerName="controller-manager"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.810102 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.811805 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-558b56cbd8-nldkr"]
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.812548 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.812645 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.814292 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.814624 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.814817 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.814999 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.815072 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.815277 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.815492 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.815671 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.815867 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.815944 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.816154 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.820907 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"]
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.825919 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.827656 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-558b56cbd8-nldkr"]
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.906774 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-serving-cert\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.906832 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjpft\" (UniqueName: \"kubernetes.io/projected/d0c66598-1634-4ed1-b29d-e1fda5c874bb-kube-api-access-cjpft\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.906881 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-proxy-ca-bundles\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.906904 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-config\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.906932 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk28v\" (UniqueName: \"kubernetes.io/projected/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-kube-api-access-xk28v\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.907066 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-client-ca\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.907095 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-config\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.907122 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-client-ca\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
Jan 30 10:59:31 crc kubenswrapper[4869]: I0130 10:59:31.907214 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0c66598-1634-4ed1-b29d-e1fda5c874bb-serving-cert\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008362 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0c66598-1634-4ed1-b29d-e1fda5c874bb-serving-cert\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008433 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-serving-cert\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008462 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjpft\" (UniqueName: \"kubernetes.io/projected/d0c66598-1634-4ed1-b29d-e1fda5c874bb-kube-api-access-cjpft\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008500 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-proxy-ca-bundles\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008526 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-config\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008551 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk28v\" (UniqueName: \"kubernetes.io/projected/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-kube-api-access-xk28v\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008603 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-client-ca\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr"
Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008633 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-config\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"
\"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.008657 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-client-ca\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.009690 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-client-ca\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.010116 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-proxy-ca-bundles\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.010273 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-client-ca\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.010517 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-config\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.010589 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-config\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.013866 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0c66598-1634-4ed1-b29d-e1fda5c874bb-serving-cert\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.016224 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-serving-cert\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.023487 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xk28v\" (UniqueName: \"kubernetes.io/projected/dcca7a66-03a8-463d-8c33-0e31c1fcf15f-kube-api-access-xk28v\") pod \"controller-manager-558b56cbd8-nldkr\" (UID: \"dcca7a66-03a8-463d-8c33-0e31c1fcf15f\") " pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.024869 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjpft\" (UniqueName: \"kubernetes.io/projected/d0c66598-1634-4ed1-b29d-e1fda5c874bb-kube-api-access-cjpft\") pod \"route-controller-manager-86c679cff5-99ksj\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.130059 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.138003 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.138884 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3de467a9-b50e-4af7-816d-c346960a39af" path="/var/lib/kubelet/pods/3de467a9-b50e-4af7-816d-c346960a39af/volumes" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.139506 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9ed3410-fb43-440e-8d7f-832850050d0c" path="/var/lib/kubelet/pods/e9ed3410-fb43-440e-8d7f-832850050d0c/volumes" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.387153 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-558b56cbd8-nldkr"] Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.485988 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"] Jan 30 10:59:32 crc kubenswrapper[4869]: W0130 10:59:32.498270 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0c66598_1634_4ed1_b29d_e1fda5c874bb.slice/crio-bd3318318df85716f4c5a468b5948c7381b9e6474ceb2a799c07375cf547a741 WatchSource:0}: Error finding container bd3318318df85716f4c5a468b5948c7381b9e6474ceb2a799c07375cf547a741: Status 404 returned error can't find the container with id bd3318318df85716f4c5a468b5948c7381b9e6474ceb2a799c07375cf547a741 Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.631388 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" event={"ID":"d0c66598-1634-4ed1-b29d-e1fda5c874bb","Type":"ContainerStarted","Data":"43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708"} Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.631439 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" event={"ID":"d0c66598-1634-4ed1-b29d-e1fda5c874bb","Type":"ContainerStarted","Data":"bd3318318df85716f4c5a468b5948c7381b9e6474ceb2a799c07375cf547a741"} Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.632287 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.633569 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" event={"ID":"dcca7a66-03a8-463d-8c33-0e31c1fcf15f","Type":"ContainerStarted","Data":"2ae8d447bbec3a43d5e707724e44dcf1f839da056f34ff23f1e7b4b40890e979"} Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.633690 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" event={"ID":"dcca7a66-03a8-463d-8c33-0e31c1fcf15f","Type":"ContainerStarted","Data":"4cbb75b9dec4e21b02863e5e3ab13cd1512f086539bdc58718bef788fb0e8b63"} Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.633831 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.633571 4869 patch_prober.go:28] interesting pod/route-controller-manager-86c679cff5-99ksj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: connect: connection refused" start-of-body= Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.634043 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" podUID="d0c66598-1634-4ed1-b29d-e1fda5c874bb" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: connect: connection refused" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.634558 4869 patch_prober.go:28] interesting pod/controller-manager-558b56cbd8-nldkr container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.64:8443/healthz\": dial tcp 10.217.0.64:8443: connect: connection refused" start-of-body= Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.634601 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" podUID="dcca7a66-03a8-463d-8c33-0e31c1fcf15f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.64:8443/healthz\": dial tcp 10.217.0.64:8443: connect: connection refused" Jan 30 10:59:32 crc kubenswrapper[4869]: I0130 10:59:32.649866 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" podStartSLOduration=3.649846309 podStartE2EDuration="3.649846309s" podCreationTimestamp="2026-01-30 10:59:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:59:32.648964753 +0000 UTC m=+323.198840839" watchObservedRunningTime="2026-01-30 10:59:32.649846309 +0000 UTC m=+323.199722375" Jan 30 10:59:33 crc kubenswrapper[4869]: I0130 10:59:33.641315 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" Jan 30 10:59:33 crc kubenswrapper[4869]: I0130 10:59:33.642200 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:33 crc kubenswrapper[4869]: I0130 10:59:33.660879 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-558b56cbd8-nldkr" podStartSLOduration=4.660856042 podStartE2EDuration="4.660856042s" podCreationTimestamp="2026-01-30 10:59:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:59:32.665967251 +0000 UTC m=+323.215843317" watchObservedRunningTime="2026-01-30 10:59:33.660856042 +0000 UTC m=+324.210732108" Jan 30 10:59:36 crc kubenswrapper[4869]: I0130 10:59:36.142237 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:36 crc kubenswrapper[4869]: I0130 10:59:36.181364 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 10:59:42 crc kubenswrapper[4869]: I0130 10:59:42.834941 4869 patch_prober.go:28] interesting pod/package-server-manager-789f6589d5-f2zrw container/package-server-manager namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 10:59:42 crc kubenswrapper[4869]: I0130 10:59:42.835314 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" podUID="7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee" containerName="package-server-manager" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 10:59:42 crc kubenswrapper[4869]: I0130 10:59:42.834974 4869 patch_prober.go:28] interesting pod/package-server-manager-789f6589d5-f2zrw container/package-server-manager namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 10:59:42 crc kubenswrapper[4869]: I0130 10:59:42.835466 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f2zrw" podUID="7c385c07-3f8d-4f69-a0c6-c4e3d6d141ee" containerName="package-server-manager" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 10:59:49 crc kubenswrapper[4869]: I0130 10:59:49.747387 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"] Jan 30 10:59:49 crc kubenswrapper[4869]: I0130 10:59:49.748038 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" podUID="d0c66598-1634-4ed1-b29d-e1fda5c874bb" containerName="route-controller-manager" containerID="cri-o://43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708" gracePeriod=30 Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.266275 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.439094 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-config\") pod \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.439547 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-client-ca\") pod \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.439662 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0c66598-1634-4ed1-b29d-e1fda5c874bb-serving-cert\") pod \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.439730 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjpft\" (UniqueName: \"kubernetes.io/projected/d0c66598-1634-4ed1-b29d-e1fda5c874bb-kube-api-access-cjpft\") pod \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\" (UID: \"d0c66598-1634-4ed1-b29d-e1fda5c874bb\") " Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.440763 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-config" (OuterVolumeSpecName: "config") pod "d0c66598-1634-4ed1-b29d-e1fda5c874bb" (UID: "d0c66598-1634-4ed1-b29d-e1fda5c874bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.440934 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-client-ca" (OuterVolumeSpecName: "client-ca") pod "d0c66598-1634-4ed1-b29d-e1fda5c874bb" (UID: "d0c66598-1634-4ed1-b29d-e1fda5c874bb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.447984 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0c66598-1634-4ed1-b29d-e1fda5c874bb-kube-api-access-cjpft" (OuterVolumeSpecName: "kube-api-access-cjpft") pod "d0c66598-1634-4ed1-b29d-e1fda5c874bb" (UID: "d0c66598-1634-4ed1-b29d-e1fda5c874bb"). InnerVolumeSpecName "kube-api-access-cjpft". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.448102 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c66598-1634-4ed1-b29d-e1fda5c874bb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d0c66598-1634-4ed1-b29d-e1fda5c874bb" (UID: "d0c66598-1634-4ed1-b29d-e1fda5c874bb"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.540745 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.540790 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0c66598-1634-4ed1-b29d-e1fda5c874bb-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.540804 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjpft\" (UniqueName: \"kubernetes.io/projected/d0c66598-1634-4ed1-b29d-e1fda5c874bb-kube-api-access-cjpft\") on node \"crc\" DevicePath \"\"" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.540819 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0c66598-1634-4ed1-b29d-e1fda5c874bb-config\") on node \"crc\" DevicePath \"\"" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.732236 4869 generic.go:334] "Generic (PLEG): container finished" podID="d0c66598-1634-4ed1-b29d-e1fda5c874bb" containerID="43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708" exitCode=0 Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.732316 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" event={"ID":"d0c66598-1634-4ed1-b29d-e1fda5c874bb","Type":"ContainerDied","Data":"43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708"} Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.732345 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.732381 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj" event={"ID":"d0c66598-1634-4ed1-b29d-e1fda5c874bb","Type":"ContainerDied","Data":"bd3318318df85716f4c5a468b5948c7381b9e6474ceb2a799c07375cf547a741"} Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.732404 4869 scope.go:117] "RemoveContainer" containerID="43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.751194 4869 scope.go:117] "RemoveContainer" containerID="43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708" Jan 30 10:59:50 crc kubenswrapper[4869]: E0130 10:59:50.751649 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708\": container with ID starting with 43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708 not found: ID does not exist" containerID="43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.751678 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708"} err="failed to get container status \"43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708\": rpc error: code = NotFound desc = could not find container \"43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708\": container with ID starting with 43c49c0ab2bbf4793117fe5aa4a8e343ac4832e02d9c2d49e333d7117c22a708 not found: ID does not exist" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.763599 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"] Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.767857 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86c679cff5-99ksj"] Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.977186 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f"] Jan 30 10:59:50 crc kubenswrapper[4869]: E0130 10:59:50.977405 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0c66598-1634-4ed1-b29d-e1fda5c874bb" containerName="route-controller-manager" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.977415 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0c66598-1634-4ed1-b29d-e1fda5c874bb" containerName="route-controller-manager" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.977496 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0c66598-1634-4ed1-b29d-e1fda5c874bb" containerName="route-controller-manager" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.977853 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.980899 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.980904 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.981199 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.981001 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.982440 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.984334 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 10:59:50 crc kubenswrapper[4869]: I0130 10:59:50.991247 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f"] Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.147414 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e73071-cb67-48b5-8cba-46d4b704f67f-serving-cert\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.147482 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvmfj\" (UniqueName: \"kubernetes.io/projected/77e73071-cb67-48b5-8cba-46d4b704f67f-kube-api-access-gvmfj\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.147514 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e73071-cb67-48b5-8cba-46d4b704f67f-config\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.147584 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77e73071-cb67-48b5-8cba-46d4b704f67f-client-ca\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.248761 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvmfj\" (UniqueName: \"kubernetes.io/projected/77e73071-cb67-48b5-8cba-46d4b704f67f-kube-api-access-gvmfj\") pod 
\"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.248833 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e73071-cb67-48b5-8cba-46d4b704f67f-config\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.248878 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77e73071-cb67-48b5-8cba-46d4b704f67f-client-ca\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.248916 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e73071-cb67-48b5-8cba-46d4b704f67f-serving-cert\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.250238 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e73071-cb67-48b5-8cba-46d4b704f67f-config\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.250334 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77e73071-cb67-48b5-8cba-46d4b704f67f-client-ca\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.254454 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e73071-cb67-48b5-8cba-46d4b704f67f-serving-cert\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.268147 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvmfj\" (UniqueName: \"kubernetes.io/projected/77e73071-cb67-48b5-8cba-46d4b704f67f-kube-api-access-gvmfj\") pod \"route-controller-manager-774f9c898d-w977f\" (UID: \"77e73071-cb67-48b5-8cba-46d4b704f67f\") " pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.307600 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.710314 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f"] Jan 30 10:59:51 crc kubenswrapper[4869]: I0130 10:59:51.743407 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" event={"ID":"77e73071-cb67-48b5-8cba-46d4b704f67f","Type":"ContainerStarted","Data":"0f30a3cb1571ed64a00e523d79d1aec018ebc9a7e1157bd75fac53f62bb828c4"} Jan 30 10:59:52 crc kubenswrapper[4869]: I0130 10:59:52.138987 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0c66598-1634-4ed1-b29d-e1fda5c874bb" path="/var/lib/kubelet/pods/d0c66598-1634-4ed1-b29d-e1fda5c874bb/volumes" Jan 30 10:59:52 crc kubenswrapper[4869]: I0130 10:59:52.753778 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" event={"ID":"77e73071-cb67-48b5-8cba-46d4b704f67f","Type":"ContainerStarted","Data":"5832e17a198c1141caf03924b3ad02bb64bebc37ed81394b09d0ac93b002c4f7"} Jan 30 10:59:52 crc kubenswrapper[4869]: I0130 10:59:52.754392 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:52 crc kubenswrapper[4869]: I0130 10:59:52.763698 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" Jan 30 10:59:52 crc kubenswrapper[4869]: I0130 10:59:52.782917 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-774f9c898d-w977f" podStartSLOduration=3.782879956 podStartE2EDuration="3.782879956s" podCreationTimestamp="2026-01-30 10:59:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 10:59:52.777809019 +0000 UTC m=+343.327685085" watchObservedRunningTime="2026-01-30 10:59:52.782879956 +0000 UTC m=+343.332756062" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.167407 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf"] Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.168754 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.171148 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.171947 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.176519 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf"] Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.369035 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b980241e-5870-4e34-af67-379e2470fb36-secret-volume\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.369234 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b980241e-5870-4e34-af67-379e2470fb36-config-volume\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.369362 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zll7m\" (UniqueName: \"kubernetes.io/projected/b980241e-5870-4e34-af67-379e2470fb36-kube-api-access-zll7m\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.470336 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b980241e-5870-4e34-af67-379e2470fb36-config-volume\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.470412 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zll7m\" (UniqueName: \"kubernetes.io/projected/b980241e-5870-4e34-af67-379e2470fb36-kube-api-access-zll7m\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.470473 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b980241e-5870-4e34-af67-379e2470fb36-secret-volume\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.471461 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b980241e-5870-4e34-af67-379e2470fb36-config-volume\") pod 
\"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.479450 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b980241e-5870-4e34-af67-379e2470fb36-secret-volume\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.488165 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zll7m\" (UniqueName: \"kubernetes.io/projected/b980241e-5870-4e34-af67-379e2470fb36-kube-api-access-zll7m\") pod \"collect-profiles-29496180-bjlvf\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.490045 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:00 crc kubenswrapper[4869]: I0130 11:00:00.864378 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf"] Jan 30 11:00:01 crc kubenswrapper[4869]: I0130 11:00:01.798193 4869 generic.go:334] "Generic (PLEG): container finished" podID="b980241e-5870-4e34-af67-379e2470fb36" containerID="cc060fb634eddcda6fb706c2a7dcdb30c253af51f7878c50bdbf547b343700e3" exitCode=0 Jan 30 11:00:01 crc kubenswrapper[4869]: I0130 11:00:01.798255 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" event={"ID":"b980241e-5870-4e34-af67-379e2470fb36","Type":"ContainerDied","Data":"cc060fb634eddcda6fb706c2a7dcdb30c253af51f7878c50bdbf547b343700e3"} Jan 30 11:00:01 crc kubenswrapper[4869]: I0130 11:00:01.798613 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" event={"ID":"b980241e-5870-4e34-af67-379e2470fb36","Type":"ContainerStarted","Data":"3b03ec2a987107d10ee8734ee7f1f87430964bc57af1e6f2d50caa01960456df"} Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.112529 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.305827 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b980241e-5870-4e34-af67-379e2470fb36-config-volume\") pod \"b980241e-5870-4e34-af67-379e2470fb36\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.306219 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zll7m\" (UniqueName: \"kubernetes.io/projected/b980241e-5870-4e34-af67-379e2470fb36-kube-api-access-zll7m\") pod \"b980241e-5870-4e34-af67-379e2470fb36\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.306903 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b980241e-5870-4e34-af67-379e2470fb36-config-volume" (OuterVolumeSpecName: "config-volume") pod "b980241e-5870-4e34-af67-379e2470fb36" (UID: "b980241e-5870-4e34-af67-379e2470fb36"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.307037 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b980241e-5870-4e34-af67-379e2470fb36-secret-volume\") pod \"b980241e-5870-4e34-af67-379e2470fb36\" (UID: \"b980241e-5870-4e34-af67-379e2470fb36\") " Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.307219 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b980241e-5870-4e34-af67-379e2470fb36-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.311100 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b980241e-5870-4e34-af67-379e2470fb36-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b980241e-5870-4e34-af67-379e2470fb36" (UID: "b980241e-5870-4e34-af67-379e2470fb36"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.311980 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b980241e-5870-4e34-af67-379e2470fb36-kube-api-access-zll7m" (OuterVolumeSpecName: "kube-api-access-zll7m") pod "b980241e-5870-4e34-af67-379e2470fb36" (UID: "b980241e-5870-4e34-af67-379e2470fb36"). InnerVolumeSpecName "kube-api-access-zll7m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.407574 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zll7m\" (UniqueName: \"kubernetes.io/projected/b980241e-5870-4e34-af67-379e2470fb36-kube-api-access-zll7m\") on node \"crc\" DevicePath \"\"" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.407937 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b980241e-5870-4e34-af67-379e2470fb36-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.809616 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" event={"ID":"b980241e-5870-4e34-af67-379e2470fb36","Type":"ContainerDied","Data":"3b03ec2a987107d10ee8734ee7f1f87430964bc57af1e6f2d50caa01960456df"} Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.809665 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b03ec2a987107d10ee8734ee7f1f87430964bc57af1e6f2d50caa01960456df" Jan 30 11:00:03 crc kubenswrapper[4869]: I0130 11:00:03.809702 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.911621 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-slhz9"] Jan 30 11:00:20 crc kubenswrapper[4869]: E0130 11:00:20.913741 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b980241e-5870-4e34-af67-379e2470fb36" containerName="collect-profiles" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.913840 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b980241e-5870-4e34-af67-379e2470fb36" containerName="collect-profiles" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.914000 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b980241e-5870-4e34-af67-379e2470fb36" containerName="collect-profiles" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.914531 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930153 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xct6p\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-kube-api-access-xct6p\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930202 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-bound-sa-token\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930222 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/19f2195d-92a1-4069-8e26-5a171972b47b-trusted-ca\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930249 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/19f2195d-92a1-4069-8e26-5a171972b47b-registry-certificates\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930329 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930419 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/19f2195d-92a1-4069-8e26-5a171972b47b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930445 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/19f2195d-92a1-4069-8e26-5a171972b47b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.930475 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-registry-tls\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.936206 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-slhz9"] Jan 30 11:00:20 crc kubenswrapper[4869]: I0130 11:00:20.957894 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031271 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/19f2195d-92a1-4069-8e26-5a171972b47b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031325 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/19f2195d-92a1-4069-8e26-5a171972b47b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031373 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-registry-tls\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031407 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xct6p\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-kube-api-access-xct6p\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031446 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-bound-sa-token\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031473 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/19f2195d-92a1-4069-8e26-5a171972b47b-trusted-ca\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031497 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/19f2195d-92a1-4069-8e26-5a171972b47b-registry-certificates\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.031993 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/19f2195d-92a1-4069-8e26-5a171972b47b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.033167 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/19f2195d-92a1-4069-8e26-5a171972b47b-trusted-ca\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.033279 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/19f2195d-92a1-4069-8e26-5a171972b47b-registry-certificates\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.039241 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/19f2195d-92a1-4069-8e26-5a171972b47b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.043444 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-registry-tls\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.049654 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-bound-sa-token\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.054104 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xct6p\" (UniqueName: \"kubernetes.io/projected/19f2195d-92a1-4069-8e26-5a171972b47b-kube-api-access-xct6p\") pod \"image-registry-66df7c8f76-slhz9\" (UID: \"19f2195d-92a1-4069-8e26-5a171972b47b\") " pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.238892 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.651817 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-slhz9"] Jan 30 11:00:21 crc kubenswrapper[4869]: W0130 11:00:21.660231 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19f2195d_92a1_4069_8e26_5a171972b47b.slice/crio-c3d0e65665f7411e1acb325908fcd63ef1b6ea212d958859efdd6c2989930d26 WatchSource:0}: Error finding container c3d0e65665f7411e1acb325908fcd63ef1b6ea212d958859efdd6c2989930d26: Status 404 returned error can't find the container with id c3d0e65665f7411e1acb325908fcd63ef1b6ea212d958859efdd6c2989930d26 Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.769762 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.769830 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.902040 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" event={"ID":"19f2195d-92a1-4069-8e26-5a171972b47b","Type":"ContainerStarted","Data":"ad5d1c4d5b0e823f7148ea9dd6c6e87c98e36ad25481e957b56c5ede659d5b16"} Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.902079 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" event={"ID":"19f2195d-92a1-4069-8e26-5a171972b47b","Type":"ContainerStarted","Data":"c3d0e65665f7411e1acb325908fcd63ef1b6ea212d958859efdd6c2989930d26"} Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.902977 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:21 crc kubenswrapper[4869]: I0130 11:00:21.924062 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" podStartSLOduration=1.9240385230000001 podStartE2EDuration="1.924038523s" podCreationTimestamp="2026-01-30 11:00:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:00:21.918771381 +0000 UTC m=+372.468647467" watchObservedRunningTime="2026-01-30 11:00:21.924038523 +0000 UTC m=+372.473914589" Jan 30 11:00:41 crc kubenswrapper[4869]: I0130 11:00:41.243555 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-slhz9" Jan 30 11:00:41 crc kubenswrapper[4869]: I0130 11:00:41.334169 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dznqv"] Jan 30 11:00:51 crc kubenswrapper[4869]: I0130 11:00:51.769617 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 
Jan 30 11:00:51 crc kubenswrapper[4869]: I0130 11:00:51.770540 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.381031 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" podUID="b66a8fd2-73df-48dd-b697-95b2c50e01cd" containerName="registry" containerID="cri-o://4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79" gracePeriod=30
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.739045 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.804870 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-trusted-ca\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.804940 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b66a8fd2-73df-48dd-b697-95b2c50e01cd-ca-trust-extracted\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.804965 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwh2c\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-kube-api-access-lwh2c\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.805097 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.805125 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b66a8fd2-73df-48dd-b697-95b2c50e01cd-installation-pull-secrets\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.805165 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-certificates\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.805200 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-bound-sa-token\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.805228 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-tls\") pod \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\" (UID: \"b66a8fd2-73df-48dd-b697-95b2c50e01cd\") "
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.805994 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.806443 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.812802 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b66a8fd2-73df-48dd-b697-95b2c50e01cd-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.812861 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.822149 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b66a8fd2-73df-48dd-b697-95b2c50e01cd-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.824250 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.824903 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-kube-api-access-lwh2c" (OuterVolumeSpecName: "kube-api-access-lwh2c") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "kube-api-access-lwh2c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.825051 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b66a8fd2-73df-48dd-b697-95b2c50e01cd" (UID: "b66a8fd2-73df-48dd-b697-95b2c50e01cd"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.907032 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.907080 4869 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-tls\") on node \"crc\" DevicePath \"\""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.907089 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.907099 4869 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b66a8fd2-73df-48dd-b697-95b2c50e01cd-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.907108 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwh2c\" (UniqueName: \"kubernetes.io/projected/b66a8fd2-73df-48dd-b697-95b2c50e01cd-kube-api-access-lwh2c\") on node \"crc\" DevicePath \"\""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.907119 4869 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b66a8fd2-73df-48dd-b697-95b2c50e01cd-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Jan 30 11:01:06 crc kubenswrapper[4869]: I0130 11:01:06.907129 4869 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b66a8fd2-73df-48dd-b697-95b2c50e01cd-registry-certificates\") on node \"crc\" DevicePath \"\""
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.193946 4869 generic.go:334] "Generic (PLEG): container finished" podID="b66a8fd2-73df-48dd-b697-95b2c50e01cd" containerID="4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79" exitCode=0
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.193983 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" event={"ID":"b66a8fd2-73df-48dd-b697-95b2c50e01cd","Type":"ContainerDied","Data":"4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79"}
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.194010 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv" event={"ID":"b66a8fd2-73df-48dd-b697-95b2c50e01cd","Type":"ContainerDied","Data":"661fdaec0df62b3577c281399e0723d8c793af3f9f0389d0728e271a612af86f"}
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.194025 4869 scope.go:117] "RemoveContainer" containerID="4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79"
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.194072 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dznqv"
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.222057 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dznqv"]
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.222825 4869 scope.go:117] "RemoveContainer" containerID="4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79"
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.226059 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dznqv"]
Jan 30 11:01:07 crc kubenswrapper[4869]: E0130 11:01:07.226324 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79\": container with ID starting with 4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79 not found: ID does not exist" containerID="4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79"
Jan 30 11:01:07 crc kubenswrapper[4869]: I0130 11:01:07.226376 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79"} err="failed to get container status \"4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79\": rpc error: code = NotFound desc = could not find container \"4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79\": container with ID starting with 4212704624e9d02b68d368d0d373d740046acc03546b2711c76f5d7326189e79 not found: ID does not exist"
Jan 30 11:01:08 crc kubenswrapper[4869]: I0130 11:01:08.140210 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b66a8fd2-73df-48dd-b697-95b2c50e01cd" path="/var/lib/kubelet/pods/b66a8fd2-73df-48dd-b697-95b2c50e01cd/volumes"
Jan 30 11:01:21 crc kubenswrapper[4869]: I0130 11:01:21.769813 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:01:21 crc kubenswrapper[4869]: I0130 11:01:21.770479 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:01:21 crc kubenswrapper[4869]: I0130 11:01:21.770539 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2"
Jan 30 11:01:21 crc kubenswrapper[4869]: I0130 11:01:21.771054 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ff0bbf8a76427dc77b8dc6f1bcafe269408c942464d9529fc1f94cde0f90a036"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 11:01:21 crc kubenswrapper[4869]: I0130 11:01:21.771115 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://ff0bbf8a76427dc77b8dc6f1bcafe269408c942464d9529fc1f94cde0f90a036" gracePeriod=600
Jan 30 11:01:22 crc kubenswrapper[4869]: I0130 11:01:22.269390 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="ff0bbf8a76427dc77b8dc6f1bcafe269408c942464d9529fc1f94cde0f90a036" exitCode=0
Jan 30 11:01:22 crc kubenswrapper[4869]: I0130 11:01:22.269515 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"ff0bbf8a76427dc77b8dc6f1bcafe269408c942464d9529fc1f94cde0f90a036"}
Jan 30 11:01:22 crc kubenswrapper[4869]: I0130 11:01:22.269882 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"b42a25ce06c4520062876a9a0c419b39f2083601578538371202936d74ce51ae"}
Jan 30 11:01:22 crc kubenswrapper[4869]: I0130 11:01:22.269914 4869 scope.go:117] "RemoveContainer" containerID="ff409dfab36ab3ced7fe057b45cb346cac2b07501febafb180be9e50594c8ba4"
Jan 30 11:03:51 crc kubenswrapper[4869]: I0130 11:03:51.769826 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:03:51 crc kubenswrapper[4869]: I0130 11:03:51.770671 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:04:21 crc kubenswrapper[4869]: I0130 11:04:21.769518 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:04:21 crc kubenswrapper[4869]: I0130 11:04:21.770207 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:04:51 crc kubenswrapper[4869]: I0130 11:04:51.769499 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:04:51 crc kubenswrapper[4869]: I0130 11:04:51.770989 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:04:51 crc kubenswrapper[4869]: I0130 11:04:51.771109 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2"
Jan 30 11:04:51 crc kubenswrapper[4869]: I0130 11:04:51.772232 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b42a25ce06c4520062876a9a0c419b39f2083601578538371202936d74ce51ae"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 11:04:51 crc kubenswrapper[4869]: I0130 11:04:51.772329 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://b42a25ce06c4520062876a9a0c419b39f2083601578538371202936d74ce51ae" gracePeriod=600
Jan 30 11:04:52 crc kubenswrapper[4869]: I0130 11:04:52.347888 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="b42a25ce06c4520062876a9a0c419b39f2083601578538371202936d74ce51ae" exitCode=0
Jan 30 11:04:52 crc kubenswrapper[4869]: I0130 11:04:52.347969 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"b42a25ce06c4520062876a9a0c419b39f2083601578538371202936d74ce51ae"}
Jan 30 11:04:52 crc kubenswrapper[4869]: I0130 11:04:52.348612 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"c5808a41780bbab079aa25dfbf774b2278de2a0be2251b6799239067a10cf14f"}
Jan 30 11:04:52 crc kubenswrapper[4869]: I0130 11:04:52.348635 4869 scope.go:117] "RemoveContainer" containerID="ff0bbf8a76427dc77b8dc6f1bcafe269408c942464d9529fc1f94cde0f90a036"
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.814395 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvdq"]
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.815488 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-controller" containerID="cri-o://cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" gracePeriod=30
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.815869 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="sbdb" containerID="cri-o://5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c" gracePeriod=30
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.815906 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="nbdb" containerID="cri-o://24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a" gracePeriod=30
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.815937 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="northd" containerID="cri-o://bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" gracePeriod=30
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.815964 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0" gracePeriod=30
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.815993 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kube-rbac-proxy-node" containerID="cri-o://d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df" gracePeriod=30
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.816028 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-acl-logging" containerID="cri-o://5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c" gracePeriod=30
Jan 30 11:05:09 crc kubenswrapper[4869]: I0130 11:05:09.856657 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" containerID="cri-o://0714f38753af86dcac165d4d7e8e420136a54d01e7cceb1047f66d3da6caaba9" gracePeriod=30
Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.164167 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovnkube-controller/3.log"
Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.166952 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovn-acl-logging/0.log"
Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.167470 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovn-controller/0.log"
Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.167992 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq"
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.218884 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-468tx"] Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219211 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="sbdb" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219227 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="sbdb" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219271 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="nbdb" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219285 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="nbdb" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219306 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219353 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219367 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219379 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219393 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kubecfg-setup" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219433 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kubecfg-setup" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219449 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-acl-logging" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219459 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-acl-logging" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219479 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="northd" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219517 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="northd" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219534 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219544 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219557 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" 
containerName="kube-rbac-proxy-node" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219567 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kube-rbac-proxy-node" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219612 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b66a8fd2-73df-48dd-b697-95b2c50e01cd" containerName="registry" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219623 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b66a8fd2-73df-48dd-b697-95b2c50e01cd" containerName="registry" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219642 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219653 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.219703 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219751 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.219962 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-acl-logging" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220012 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220024 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b66a8fd2-73df-48dd-b697-95b2c50e01cd" containerName="registry" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220041 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovn-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220086 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kube-rbac-proxy-node" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220103 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="northd" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220117 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220174 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="sbdb" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220203 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220253 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="nbdb" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220270 4869 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.220447 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220458 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.220649 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220669 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.220798 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.221076 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerName="ovnkube-controller" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.223006 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273611 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-bin\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273683 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-config\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273719 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-systemd\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273737 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-slash\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273785 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-slash" (OuterVolumeSpecName: "host-slash") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273803 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-kubelet\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273817 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.273967 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274040 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274098 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-netd\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274118 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274116 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274320 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-node-log\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274351 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274359 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-systemd-units\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274376 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-node-log" (OuterVolumeSpecName: "node-log") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274405 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wc2x\" (UniqueName: \"kubernetes.io/projected/3e4cac66-8338-46fe-8296-ce9dbd2257bd-kube-api-access-9wc2x\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274427 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-ovn-kubernetes\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274445 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-openvswitch\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274473 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-log-socket\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274495 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-env-overrides\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274516 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-netns\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274491 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274547 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovn-node-metrics-cert\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274611 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-ovn\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274633 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-script-lib\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274666 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-var-lib-openvswitch\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274685 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-etc-openvswitch\") pod \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\" (UID: \"3e4cac66-8338-46fe-8296-ce9dbd2257bd\") " Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274857 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-ovnkube-config\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274890 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274914 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-kubelet\") pod 
\"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274945 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-cni-netd\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274978 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-run-ovn-kubernetes\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274997 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/aba39956-6101-4e33-a348-0a4d1f099a40-ovn-node-metrics-cert\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275012 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-cni-bin\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275082 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-log-socket\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275103 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274498 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274512 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-log-socket" (OuterVolumeSpecName: "log-socket") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274533 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274562 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.274857 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275152 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275131 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-ovn\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275175 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275187 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275219 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-node-log\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275239 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275332 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-run-netns\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275351 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-etc-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275406 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-systemd-units\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275434 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-var-lib-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275456 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-systemd\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275478 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6phg\" (UniqueName: \"kubernetes.io/projected/aba39956-6101-4e33-a348-0a4d1f099a40-kube-api-access-r6phg\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275492 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-env-overrides\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275509 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-slash\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275531 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-ovnkube-script-lib\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275595 4869 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275606 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275617 4869 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275626 4869 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275636 4869 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275644 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275652 4869 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-slash\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275660 4869 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275668 4869 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275676 4869 
reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275684 4869 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-node-log\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275694 4869 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275702 4869 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275728 4869 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275736 4869 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-log-socket\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275744 4869 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3e4cac66-8338-46fe-8296-ce9dbd2257bd-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.275751 4869 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.280102 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.280331 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e4cac66-8338-46fe-8296-ce9dbd2257bd-kube-api-access-9wc2x" (OuterVolumeSpecName: "kube-api-access-9wc2x") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "kube-api-access-9wc2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.290106 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "3e4cac66-8338-46fe-8296-ce9dbd2257bd" (UID: "3e4cac66-8338-46fe-8296-ce9dbd2257bd"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.303314 4869 scope.go:117] "RemoveContainer" containerID="32a26c6c526e3accc4bdba8be97b33df3cd756ca0405ab85d6b12e552e50cebe" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.319015 4869 scope.go:117] "RemoveContainer" containerID="0714f38753af86dcac165d4d7e8e420136a54d01e7cceb1047f66d3da6caaba9" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.335877 4869 scope.go:117] "RemoveContainer" containerID="5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.348993 4869 scope.go:117] "RemoveContainer" containerID="7bd6e53593f59f611f433d9cd72ccd9d5d4c1853abb7b9face64457860c5a366" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.365991 4869 scope.go:117] "RemoveContainer" containerID="24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376600 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-var-lib-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376653 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-systemd\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376692 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6phg\" (UniqueName: \"kubernetes.io/projected/aba39956-6101-4e33-a348-0a4d1f099a40-kube-api-access-r6phg\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376743 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-env-overrides\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-slash\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376783 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-var-lib-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376834 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-slash\") pod \"ovnkube-node-468tx\" (UID: 
\"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376802 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-ovnkube-script-lib\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.376785 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-systemd\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377044 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-ovnkube-config\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377082 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377120 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-kubelet\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377221 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377235 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-kubelet\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-cni-netd\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377335 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-cni-netd\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: 
I0130 11:05:10.377377 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-run-ovn-kubernetes\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377346 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-run-ovn-kubernetes\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377436 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/aba39956-6101-4e33-a348-0a4d1f099a40-ovn-node-metrics-cert\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377470 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-cni-bin\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377494 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-log-socket\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377532 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-cni-bin\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377545 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377574 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377610 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-log-socket\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: 
I0130 11:05:10.377621 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-ovn\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377614 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-env-overrides\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377646 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-run-ovn\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377648 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-node-log\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377675 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-node-log\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377762 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-run-netns\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377795 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-etc-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377806 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-ovnkube-script-lib\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377821 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-systemd-units\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377871 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-systemd-units\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377889 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-host-run-netns\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377897 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/aba39956-6101-4e33-a348-0a4d1f099a40-etc-openvswitch\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377874 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/aba39956-6101-4e33-a348-0a4d1f099a40-ovnkube-config\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377952 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3e4cac66-8338-46fe-8296-ce9dbd2257bd-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377975 4869 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3e4cac66-8338-46fe-8296-ce9dbd2257bd-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.377987 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wc2x\" (UniqueName: \"kubernetes.io/projected/3e4cac66-8338-46fe-8296-ce9dbd2257bd-kube-api-access-9wc2x\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.385029 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/aba39956-6101-4e33-a348-0a4d1f099a40-ovn-node-metrics-cert\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.387191 4869 scope.go:117] "RemoveContainer" containerID="4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.394373 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6phg\" (UniqueName: \"kubernetes.io/projected/aba39956-6101-4e33-a348-0a4d1f099a40-kube-api-access-r6phg\") pod \"ovnkube-node-468tx\" (UID: \"aba39956-6101-4e33-a348-0a4d1f099a40\") " pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.406748 4869 scope.go:117] "RemoveContainer" containerID="d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.420031 4869 scope.go:117] "RemoveContainer" containerID="5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.444945 4869 
scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.445519 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/2.log" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.446156 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/1.log" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.446200 4869 generic.go:334] "Generic (PLEG): container finished" podID="02f48f89-74aa-48e8-930e-7a86f15de2de" containerID="789b440a13044433df32646c52cdc72df74090c4be9a71b1135371073ef0683d" exitCode=2 Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.446261 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerDied","Data":"789b440a13044433df32646c52cdc72df74090c4be9a71b1135371073ef0683d"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.446299 4869 scope.go:117] "RemoveContainer" containerID="e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.447069 4869 scope.go:117] "RemoveContainer" containerID="789b440a13044433df32646c52cdc72df74090c4be9a71b1135371073ef0683d" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.447481 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5jpbv_openshift-multus(02f48f89-74aa-48e8-930e-7a86f15de2de)\"" pod="openshift-multus/multus-5jpbv" podUID="02f48f89-74aa-48e8-930e-7a86f15de2de" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452835 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvdq_3e4cac66-8338-46fe-8296-ce9dbd2257bd/ovn-controller/0.log" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452882 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" exitCode=0 Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452901 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" exitCode=143 Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452923 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"0714f38753af86dcac165d4d7e8e420136a54d01e7cceb1047f66d3da6caaba9"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452953 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"5f17f189ca9a216cc1b4e3cc7fee2da1ca6cb7caa1e7f24855d41f754ff6942c"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452963 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" 
event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"24d7a0e7e2b8b625fa90a1ede06d6415575a06fcbdabbf812da0097c7fa10f7a"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452973 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452982 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"4dde664692df54244518412f2f3c6842872a219cd9e73d8ed78b41c9f0ad34e0"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452994 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"d6d7ee7e73ff8c362fed4e2b2dce858fff9e391829987877c098f7142fc034df"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.452998 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.453006 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"5ff94169f8743ae1722413d1c9a5ff310bdbfc7a27a33aea93b53e4b7efe757c"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.453104 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.453114 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvdq" event={"ID":"3e4cac66-8338-46fe-8296-ce9dbd2257bd","Type":"ContainerDied","Data":"a3edb8ad57797f63733b91bcf2451a1fb6b443a155222148386450ad0e0cf3a4"} Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.468485 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.478820 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.479637 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.479677 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID 
starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.479702 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.492777 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvdq"] Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.494162 4869 scope.go:117] "RemoveContainer" containerID="e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.494177 4869 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_ovn-controller_ovnkube-node-twvdq_openshift-ovn-kubernetes_3e4cac66-8338-46fe-8296-ce9dbd2257bd_0 in pod sandbox a3edb8ad57797f63733b91bcf2451a1fb6b443a155222148386450ad0e0cf3a4 from index: no such id: 'cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588'" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.494223 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="rpc error: code = Unknown desc = failed to delete container k8s_ovn-controller_ovnkube-node-twvdq_openshift-ovn-kubernetes_3e4cac66-8338-46fe-8296-ce9dbd2257bd_0 in pod sandbox a3edb8ad57797f63733b91bcf2451a1fb6b443a155222148386450ad0e0cf3a4 from index: no such id: 'cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588'" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.494256 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.494628 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\": container with ID starting with e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024 not found: ID does not exist" containerID="e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.494678 4869 kuberuntime_gc.go:150] "Failed to remove container" err="failed to get container status \"e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\": rpc error: code = NotFound desc = could not find container \"e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024\": container with ID starting with e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024 not found: ID does not exist" containerID="e6654bc5cbb371c1a2362dee3aa8930ddf905299c3fe6cc1805e31c315b21024" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495030 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 
11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495062 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: E0130 11:05:10.495292 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495321 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495348 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495542 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495565 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495772 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.495794 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.496082 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 
11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.496110 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.496942 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.496972 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.497261 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.497277 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.497511 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvdq"] Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.497650 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.497677 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.497897 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.497917 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.498121 4869 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.498142 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.498337 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.498355 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.498611 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.498634 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.499027 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.499047 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.499285 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not 
exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.499306 4869 scope.go:117] "RemoveContainer" containerID="bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.499538 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441"} err="failed to get container status \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": rpc error: code = NotFound desc = could not find container \"bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441\": container with ID starting with bfcb96b016960fad15307ab59da7c073fbfea39f78605d7d1bc44fb0147e1441 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.499564 4869 scope.go:117] "RemoveContainer" containerID="cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.499818 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588"} err="failed to get container status \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": rpc error: code = NotFound desc = could not find container \"cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588\": container with ID starting with cb3016aabc0e8b0b69405b24e3ee5b1b3f52bd941a49a1d643ff54a46fea1588 not found: ID does not exist" Jan 30 11:05:10 crc kubenswrapper[4869]: I0130 11:05:10.537072 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:11 crc kubenswrapper[4869]: I0130 11:05:11.460771 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/2.log" Jan 30 11:05:11 crc kubenswrapper[4869]: I0130 11:05:11.462587 4869 generic.go:334] "Generic (PLEG): container finished" podID="aba39956-6101-4e33-a348-0a4d1f099a40" containerID="6f0a9aad47bc03bb6a547924446af23844d5394beb4e8b2ba655d0fb291cf2dc" exitCode=0 Jan 30 11:05:11 crc kubenswrapper[4869]: I0130 11:05:11.462631 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerDied","Data":"6f0a9aad47bc03bb6a547924446af23844d5394beb4e8b2ba655d0fb291cf2dc"} Jan 30 11:05:11 crc kubenswrapper[4869]: I0130 11:05:11.462657 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"676cd125b6471b590f7d43d50af23a04877bf367dedfe448284647412612ab0c"} Jan 30 11:05:12 crc kubenswrapper[4869]: I0130 11:05:12.141416 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e4cac66-8338-46fe-8296-ce9dbd2257bd" path="/var/lib/kubelet/pods/3e4cac66-8338-46fe-8296-ce9dbd2257bd/volumes" Jan 30 11:05:12 crc kubenswrapper[4869]: I0130 11:05:12.470361 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"d3a1774cd13576cd3e2c7438e68a597fcb2e4e1fd9984b56bfc47bd1940509c7"} Jan 30 11:05:12 crc kubenswrapper[4869]: I0130 11:05:12.470401 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"437db94d2c9bc8deea55c1d0302e2ced6b2117a4739a9a9450895adfe356091a"} Jan 30 11:05:12 crc kubenswrapper[4869]: I0130 11:05:12.470411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"332adfd783d74f7b09b530380c9c442603d6e41bc262331c16722a6c89c6d3dc"} Jan 30 11:05:12 crc kubenswrapper[4869]: I0130 11:05:12.470455 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"2286bc72eaaa8794d05a9e483a209947940b37ed3b1fc4f8211f6becd7afb063"} Jan 30 11:05:12 crc kubenswrapper[4869]: I0130 11:05:12.470465 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"5c81a760cb4034569b61693372aec4fe3d307e72556d5fc0b008c9595f18db18"} Jan 30 11:05:12 crc kubenswrapper[4869]: I0130 11:05:12.470475 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"1241642f984832c2d3da4f2faed00c50d2c46ec9762a7d55bea573bbf6261283"} Jan 30 11:05:14 crc kubenswrapper[4869]: I0130 11:05:14.496935 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"2c21c1f7c7f0da1df6d2d671e471b4ea3408ff2698849e3fcb284374236cb688"} Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.391739 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-zcxm2"] Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.392957 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.395394 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.395929 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.395997 4869 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-qxrkw" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.396174 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.444609 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5f08cad2-3aae-4ab0-bea9-b9242a2de799-node-mnt\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.444659 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n79f8\" (UniqueName: \"kubernetes.io/projected/5f08cad2-3aae-4ab0-bea9-b9242a2de799-kube-api-access-n79f8\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.444705 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5f08cad2-3aae-4ab0-bea9-b9242a2de799-crc-storage\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.545365 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5f08cad2-3aae-4ab0-bea9-b9242a2de799-crc-storage\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.545752 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5f08cad2-3aae-4ab0-bea9-b9242a2de799-node-mnt\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.545775 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n79f8\" (UniqueName: \"kubernetes.io/projected/5f08cad2-3aae-4ab0-bea9-b9242a2de799-kube-api-access-n79f8\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.545951 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5f08cad2-3aae-4ab0-bea9-b9242a2de799-node-mnt\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.546220 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5f08cad2-3aae-4ab0-bea9-b9242a2de799-crc-storage\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.563670 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n79f8\" (UniqueName: \"kubernetes.io/projected/5f08cad2-3aae-4ab0-bea9-b9242a2de799-kube-api-access-n79f8\") pod \"crc-storage-crc-zcxm2\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: I0130 11:05:16.726076 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: E0130 11:05:16.756919 4869 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(925488f01aa337a35edfe2da57e6cdb92745d3932ef370cf9cea8494d3dd91e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 11:05:16 crc kubenswrapper[4869]: E0130 11:05:16.756984 4869 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(925488f01aa337a35edfe2da57e6cdb92745d3932ef370cf9cea8494d3dd91e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: E0130 11:05:16.757005 4869 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(925488f01aa337a35edfe2da57e6cdb92745d3932ef370cf9cea8494d3dd91e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:16 crc kubenswrapper[4869]: E0130 11:05:16.757044 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-zcxm2_crc-storage(5f08cad2-3aae-4ab0-bea9-b9242a2de799)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-zcxm2_crc-storage(5f08cad2-3aae-4ab0-bea9-b9242a2de799)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(925488f01aa337a35edfe2da57e6cdb92745d3932ef370cf9cea8494d3dd91e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-zcxm2" podUID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" Jan 30 11:05:17 crc kubenswrapper[4869]: I0130 11:05:17.260228 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zcxm2"] Jan 30 11:05:17 crc kubenswrapper[4869]: I0130 11:05:17.513180 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" event={"ID":"aba39956-6101-4e33-a348-0a4d1f099a40","Type":"ContainerStarted","Data":"bf25c281f7d02dd651e19cc785de0311d2d87363fb179074a8e8a57e72cf6fc1"} Jan 30 11:05:17 crc kubenswrapper[4869]: I0130 11:05:17.513199 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:17 crc kubenswrapper[4869]: I0130 11:05:17.513615 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:17 crc kubenswrapper[4869]: E0130 11:05:17.531930 4869 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(c9174fc73b0aad538a070808b6df50ec5a14ebfdf1aa10cf7b12c44b62231826): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 11:05:17 crc kubenswrapper[4869]: E0130 11:05:17.532018 4869 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(c9174fc73b0aad538a070808b6df50ec5a14ebfdf1aa10cf7b12c44b62231826): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:17 crc kubenswrapper[4869]: E0130 11:05:17.532053 4869 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(c9174fc73b0aad538a070808b6df50ec5a14ebfdf1aa10cf7b12c44b62231826): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:17 crc kubenswrapper[4869]: E0130 11:05:17.532116 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-zcxm2_crc-storage(5f08cad2-3aae-4ab0-bea9-b9242a2de799)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-zcxm2_crc-storage(5f08cad2-3aae-4ab0-bea9-b9242a2de799)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(c9174fc73b0aad538a070808b6df50ec5a14ebfdf1aa10cf7b12c44b62231826): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-zcxm2" podUID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" Jan 30 11:05:17 crc kubenswrapper[4869]: I0130 11:05:17.545170 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" podStartSLOduration=7.545153773 podStartE2EDuration="7.545153773s" podCreationTimestamp="2026-01-30 11:05:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:05:17.542891959 +0000 UTC m=+668.092768045" watchObservedRunningTime="2026-01-30 11:05:17.545153773 +0000 UTC m=+668.095029839" Jan 30 11:05:18 crc kubenswrapper[4869]: I0130 11:05:18.521609 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:18 crc kubenswrapper[4869]: I0130 11:05:18.522043 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:18 crc kubenswrapper[4869]: I0130 11:05:18.522056 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:18 crc kubenswrapper[4869]: I0130 11:05:18.551851 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:18 crc kubenswrapper[4869]: I0130 11:05:18.555968 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:23 crc kubenswrapper[4869]: I0130 11:05:23.133908 4869 scope.go:117] "RemoveContainer" containerID="789b440a13044433df32646c52cdc72df74090c4be9a71b1135371073ef0683d" Jan 30 11:05:23 crc kubenswrapper[4869]: E0130 11:05:23.134532 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5jpbv_openshift-multus(02f48f89-74aa-48e8-930e-7a86f15de2de)\"" pod="openshift-multus/multus-5jpbv" podUID="02f48f89-74aa-48e8-930e-7a86f15de2de" Jan 30 11:05:30 crc kubenswrapper[4869]: I0130 11:05:30.136060 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:30 crc kubenswrapper[4869]: I0130 11:05:30.137258 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:30 crc kubenswrapper[4869]: E0130 11:05:30.161958 4869 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(ede3c6dfffbfd03bdba544d9d3bbaa75c500dfe1539e8680faad348684f9272c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 11:05:30 crc kubenswrapper[4869]: E0130 11:05:30.162030 4869 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(ede3c6dfffbfd03bdba544d9d3bbaa75c500dfe1539e8680faad348684f9272c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:30 crc kubenswrapper[4869]: E0130 11:05:30.162050 4869 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(ede3c6dfffbfd03bdba544d9d3bbaa75c500dfe1539e8680faad348684f9272c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:30 crc kubenswrapper[4869]: E0130 11:05:30.162094 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-zcxm2_crc-storage(5f08cad2-3aae-4ab0-bea9-b9242a2de799)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-zcxm2_crc-storage(5f08cad2-3aae-4ab0-bea9-b9242a2de799)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-zcxm2_crc-storage_5f08cad2-3aae-4ab0-bea9-b9242a2de799_0(ede3c6dfffbfd03bdba544d9d3bbaa75c500dfe1539e8680faad348684f9272c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-zcxm2" podUID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" Jan 30 11:05:38 crc kubenswrapper[4869]: I0130 11:05:38.132826 4869 scope.go:117] "RemoveContainer" containerID="789b440a13044433df32646c52cdc72df74090c4be9a71b1135371073ef0683d" Jan 30 11:05:38 crc kubenswrapper[4869]: I0130 11:05:38.621549 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5jpbv_02f48f89-74aa-48e8-930e-7a86f15de2de/kube-multus/2.log" Jan 30 11:05:38 crc kubenswrapper[4869]: I0130 11:05:38.621993 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5jpbv" event={"ID":"02f48f89-74aa-48e8-930e-7a86f15de2de","Type":"ContainerStarted","Data":"7afd646f0c7d6e7c284677abd0e208feb46a4ff52111e7263d108d4524173eb0"} Jan 30 11:05:40 crc kubenswrapper[4869]: I0130 11:05:40.560015 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-468tx" Jan 30 11:05:44 crc kubenswrapper[4869]: I0130 11:05:44.133022 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:44 crc kubenswrapper[4869]: I0130 11:05:44.133515 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:44 crc kubenswrapper[4869]: I0130 11:05:44.536869 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zcxm2"] Jan 30 11:05:44 crc kubenswrapper[4869]: I0130 11:05:44.546204 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 11:05:44 crc kubenswrapper[4869]: I0130 11:05:44.651523 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zcxm2" event={"ID":"5f08cad2-3aae-4ab0-bea9-b9242a2de799","Type":"ContainerStarted","Data":"604ac8b4ff80a15bb794e940403541ed03105b498a566228cf00d8810e4ff99f"} Jan 30 11:05:45 crc kubenswrapper[4869]: I0130 11:05:45.658364 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zcxm2" event={"ID":"5f08cad2-3aae-4ab0-bea9-b9242a2de799","Type":"ContainerStarted","Data":"1ebd791dc091581a1f56fd9769496c84e9cd7d7c4cde3e9e92c4620a2d7c04b4"} Jan 30 11:05:46 crc kubenswrapper[4869]: I0130 11:05:46.665340 4869 generic.go:334] "Generic (PLEG): container finished" podID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" containerID="1ebd791dc091581a1f56fd9769496c84e9cd7d7c4cde3e9e92c4620a2d7c04b4" exitCode=0 Jan 30 11:05:46 crc kubenswrapper[4869]: I0130 11:05:46.665382 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zcxm2" event={"ID":"5f08cad2-3aae-4ab0-bea9-b9242a2de799","Type":"ContainerDied","Data":"1ebd791dc091581a1f56fd9769496c84e9cd7d7c4cde3e9e92c4620a2d7c04b4"} Jan 30 11:05:47 crc kubenswrapper[4869]: I0130 11:05:47.864957 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.034329 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n79f8\" (UniqueName: \"kubernetes.io/projected/5f08cad2-3aae-4ab0-bea9-b9242a2de799-kube-api-access-n79f8\") pod \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.034394 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5f08cad2-3aae-4ab0-bea9-b9242a2de799-node-mnt\") pod \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.034438 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5f08cad2-3aae-4ab0-bea9-b9242a2de799-crc-storage\") pod \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\" (UID: \"5f08cad2-3aae-4ab0-bea9-b9242a2de799\") " Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.034553 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5f08cad2-3aae-4ab0-bea9-b9242a2de799-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "5f08cad2-3aae-4ab0-bea9-b9242a2de799" (UID: "5f08cad2-3aae-4ab0-bea9-b9242a2de799"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.034673 4869 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5f08cad2-3aae-4ab0-bea9-b9242a2de799-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.039672 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f08cad2-3aae-4ab0-bea9-b9242a2de799-kube-api-access-n79f8" (OuterVolumeSpecName: "kube-api-access-n79f8") pod "5f08cad2-3aae-4ab0-bea9-b9242a2de799" (UID: "5f08cad2-3aae-4ab0-bea9-b9242a2de799"). InnerVolumeSpecName "kube-api-access-n79f8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.049004 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f08cad2-3aae-4ab0-bea9-b9242a2de799-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "5f08cad2-3aae-4ab0-bea9-b9242a2de799" (UID: "5f08cad2-3aae-4ab0-bea9-b9242a2de799"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.135790 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n79f8\" (UniqueName: \"kubernetes.io/projected/5f08cad2-3aae-4ab0-bea9-b9242a2de799-kube-api-access-n79f8\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.136218 4869 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5f08cad2-3aae-4ab0-bea9-b9242a2de799-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.677234 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zcxm2" event={"ID":"5f08cad2-3aae-4ab0-bea9-b9242a2de799","Type":"ContainerDied","Data":"604ac8b4ff80a15bb794e940403541ed03105b498a566228cf00d8810e4ff99f"} Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.677559 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="604ac8b4ff80a15bb794e940403541ed03105b498a566228cf00d8810e4ff99f" Jan 30 11:05:48 crc kubenswrapper[4869]: I0130 11:05:48.677438 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zcxm2" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.348242 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf"] Jan 30 11:05:54 crc kubenswrapper[4869]: E0130 11:05:54.348701 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" containerName="storage" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.348804 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" containerName="storage" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.348900 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" containerName="storage" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.349568 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.351929 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.360057 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf"] Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.445529 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.445655 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.445701 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc69z\" (UniqueName: \"kubernetes.io/projected/ed645f02-420a-49d0-8228-02df41ef2808-kube-api-access-fc69z\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.546700 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.546787 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.546810 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc69z\" (UniqueName: \"kubernetes.io/projected/ed645f02-420a-49d0-8228-02df41ef2808-kube-api-access-fc69z\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.547259 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.547302 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.568499 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc69z\" (UniqueName: \"kubernetes.io/projected/ed645f02-420a-49d0-8228-02df41ef2808-kube-api-access-fc69z\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.664648 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:05:54 crc kubenswrapper[4869]: I0130 11:05:54.850241 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf"] Jan 30 11:05:54 crc kubenswrapper[4869]: W0130 11:05:54.850535 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded645f02_420a_49d0_8228_02df41ef2808.slice/crio-1b39e507083be3cffe5e7ce51d3e26adf303b5e94eb69bbf3932a1a8fb86f334 WatchSource:0}: Error finding container 1b39e507083be3cffe5e7ce51d3e26adf303b5e94eb69bbf3932a1a8fb86f334: Status 404 returned error can't find the container with id 1b39e507083be3cffe5e7ce51d3e26adf303b5e94eb69bbf3932a1a8fb86f334 Jan 30 11:05:55 crc kubenswrapper[4869]: I0130 11:05:55.726777 4869 generic.go:334] "Generic (PLEG): container finished" podID="ed645f02-420a-49d0-8228-02df41ef2808" containerID="7d5d6d79ecf82738b1bb2d514d6f5b924cf663429604180c91be68ec748ac8b5" exitCode=0 Jan 30 11:05:55 crc kubenswrapper[4869]: I0130 11:05:55.726818 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" event={"ID":"ed645f02-420a-49d0-8228-02df41ef2808","Type":"ContainerDied","Data":"7d5d6d79ecf82738b1bb2d514d6f5b924cf663429604180c91be68ec748ac8b5"} Jan 30 11:05:55 crc kubenswrapper[4869]: I0130 11:05:55.726854 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" event={"ID":"ed645f02-420a-49d0-8228-02df41ef2808","Type":"ContainerStarted","Data":"1b39e507083be3cffe5e7ce51d3e26adf303b5e94eb69bbf3932a1a8fb86f334"} Jan 30 11:05:57 crc kubenswrapper[4869]: I0130 11:05:57.737341 4869 generic.go:334] "Generic (PLEG): container finished" podID="ed645f02-420a-49d0-8228-02df41ef2808" containerID="7201dcce9a8dbc106734f73a5a782bcdbb94a9247397aa96e78d4ad1648ced39" exitCode=0 Jan 30 11:05:57 crc kubenswrapper[4869]: I0130 11:05:57.737536 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" event={"ID":"ed645f02-420a-49d0-8228-02df41ef2808","Type":"ContainerDied","Data":"7201dcce9a8dbc106734f73a5a782bcdbb94a9247397aa96e78d4ad1648ced39"} Jan 30 11:05:58 crc kubenswrapper[4869]: I0130 11:05:58.744446 4869 generic.go:334] "Generic (PLEG): container finished" podID="ed645f02-420a-49d0-8228-02df41ef2808" containerID="9928b50fbac3a1e6b502354b85a63bd4a14a164e3aec8f73a23abe2ad902f83c" exitCode=0 Jan 30 11:05:58 crc kubenswrapper[4869]: I0130 11:05:58.744531 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" event={"ID":"ed645f02-420a-49d0-8228-02df41ef2808","Type":"ContainerDied","Data":"9928b50fbac3a1e6b502354b85a63bd4a14a164e3aec8f73a23abe2ad902f83c"} Jan 30 11:05:59 crc kubenswrapper[4869]: I0130 11:05:59.967449 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.010319 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-bundle\") pod \"ed645f02-420a-49d0-8228-02df41ef2808\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.010750 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc69z\" (UniqueName: \"kubernetes.io/projected/ed645f02-420a-49d0-8228-02df41ef2808-kube-api-access-fc69z\") pod \"ed645f02-420a-49d0-8228-02df41ef2808\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.010898 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-util\") pod \"ed645f02-420a-49d0-8228-02df41ef2808\" (UID: \"ed645f02-420a-49d0-8228-02df41ef2808\") " Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.011080 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-bundle" (OuterVolumeSpecName: "bundle") pod "ed645f02-420a-49d0-8228-02df41ef2808" (UID: "ed645f02-420a-49d0-8228-02df41ef2808"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.011239 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.024581 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-util" (OuterVolumeSpecName: "util") pod "ed645f02-420a-49d0-8228-02df41ef2808" (UID: "ed645f02-420a-49d0-8228-02df41ef2808"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.029151 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed645f02-420a-49d0-8228-02df41ef2808-kube-api-access-fc69z" (OuterVolumeSpecName: "kube-api-access-fc69z") pod "ed645f02-420a-49d0-8228-02df41ef2808" (UID: "ed645f02-420a-49d0-8228-02df41ef2808"). InnerVolumeSpecName "kube-api-access-fc69z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.112204 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed645f02-420a-49d0-8228-02df41ef2808-util\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.112262 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc69z\" (UniqueName: \"kubernetes.io/projected/ed645f02-420a-49d0-8228-02df41ef2808-kube-api-access-fc69z\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.754667 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" event={"ID":"ed645f02-420a-49d0-8228-02df41ef2808","Type":"ContainerDied","Data":"1b39e507083be3cffe5e7ce51d3e26adf303b5e94eb69bbf3932a1a8fb86f334"} Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.754714 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b39e507083be3cffe5e7ce51d3e26adf303b5e94eb69bbf3932a1a8fb86f334" Jan 30 11:06:00 crc kubenswrapper[4869]: I0130 11:06:00.754792 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.110314 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-qx2rr"] Jan 30 11:06:02 crc kubenswrapper[4869]: E0130 11:06:02.110563 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed645f02-420a-49d0-8228-02df41ef2808" containerName="extract" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.110581 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed645f02-420a-49d0-8228-02df41ef2808" containerName="extract" Jan 30 11:06:02 crc kubenswrapper[4869]: E0130 11:06:02.110593 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed645f02-420a-49d0-8228-02df41ef2808" containerName="pull" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.110600 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed645f02-420a-49d0-8228-02df41ef2808" containerName="pull" Jan 30 11:06:02 crc kubenswrapper[4869]: E0130 11:06:02.110617 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed645f02-420a-49d0-8228-02df41ef2808" containerName="util" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.110626 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed645f02-420a-49d0-8228-02df41ef2808" containerName="util" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.110768 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed645f02-420a-49d0-8228-02df41ef2808" containerName="extract" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.111210 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.113297 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-crh8d" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.113423 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.114831 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.121614 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-qx2rr"] Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.263683 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd6xs\" (UniqueName: \"kubernetes.io/projected/89dc09e0-cece-4665-b6cf-5cd9fa7ea314-kube-api-access-fd6xs\") pod \"nmstate-operator-646758c888-qx2rr\" (UID: \"89dc09e0-cece-4665-b6cf-5cd9fa7ea314\") " pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.365074 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd6xs\" (UniqueName: \"kubernetes.io/projected/89dc09e0-cece-4665-b6cf-5cd9fa7ea314-kube-api-access-fd6xs\") pod \"nmstate-operator-646758c888-qx2rr\" (UID: \"89dc09e0-cece-4665-b6cf-5cd9fa7ea314\") " pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.382447 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd6xs\" (UniqueName: \"kubernetes.io/projected/89dc09e0-cece-4665-b6cf-5cd9fa7ea314-kube-api-access-fd6xs\") pod \"nmstate-operator-646758c888-qx2rr\" (UID: \"89dc09e0-cece-4665-b6cf-5cd9fa7ea314\") " pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.426750 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.611568 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-qx2rr"] Jan 30 11:06:02 crc kubenswrapper[4869]: I0130 11:06:02.768466 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" event={"ID":"89dc09e0-cece-4665-b6cf-5cd9fa7ea314","Type":"ContainerStarted","Data":"491befdde30d6ab107ce2d9cfa548070fc4540ea31958c1d4e0e527c76b53deb"} Jan 30 11:06:05 crc kubenswrapper[4869]: I0130 11:06:05.783961 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" event={"ID":"89dc09e0-cece-4665-b6cf-5cd9fa7ea314","Type":"ContainerStarted","Data":"d384071e121cb6bbc8d928e9c2f42a52a4199dfbd5d00911146a8f2db55595e6"} Jan 30 11:06:05 crc kubenswrapper[4869]: I0130 11:06:05.799192 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-qx2rr" podStartSLOduration=1.617487983 podStartE2EDuration="3.799172222s" podCreationTimestamp="2026-01-30 11:06:02 +0000 UTC" firstStartedPulling="2026-01-30 11:06:02.623927134 +0000 UTC m=+713.173803200" lastFinishedPulling="2026-01-30 11:06:04.805611373 +0000 UTC m=+715.355487439" observedRunningTime="2026-01-30 11:06:05.796768894 +0000 UTC m=+716.346644990" watchObservedRunningTime="2026-01-30 11:06:05.799172222 +0000 UTC m=+716.349048308" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.642591 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-6rnll"] Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.643902 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.650277 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb"] Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.651107 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.658786 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-4xqm9" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.669461 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.673434 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-6rnll"] Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.705721 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb"] Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.715363 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smfxj\" (UniqueName: \"kubernetes.io/projected/2b36143a-20b8-40d9-a94f-ba14118e00bc-kube-api-access-smfxj\") pod \"nmstate-metrics-54757c584b-6rnll\" (UID: \"2b36143a-20b8-40d9-a94f-ba14118e00bc\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.717634 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-jd8kn"] Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.721873 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.817087 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9n65\" (UniqueName: \"kubernetes.io/projected/d6be4693-290f-45f6-8783-4e28ab1e4578-kube-api-access-f9n65\") pod \"nmstate-webhook-8474b5b9d8-7n7pb\" (UID: \"d6be4693-290f-45f6-8783-4e28ab1e4578\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.817138 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-dbus-socket\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.817192 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-nmstate-lock\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.817221 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smfxj\" (UniqueName: \"kubernetes.io/projected/2b36143a-20b8-40d9-a94f-ba14118e00bc-kube-api-access-smfxj\") pod \"nmstate-metrics-54757c584b-6rnll\" (UID: \"2b36143a-20b8-40d9-a94f-ba14118e00bc\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.817274 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wngl6\" (UniqueName: \"kubernetes.io/projected/ab78e821-7f8a-43c5-a857-3694754330a1-kube-api-access-wngl6\") pod \"nmstate-handler-jd8kn\" 
(UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.817298 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d6be4693-290f-45f6-8783-4e28ab1e4578-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7n7pb\" (UID: \"d6be4693-290f-45f6-8783-4e28ab1e4578\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.817331 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-ovs-socket\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.836349 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w"] Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.837004 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.838686 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.838694 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.838808 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-rcbf5" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.845391 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smfxj\" (UniqueName: \"kubernetes.io/projected/2b36143a-20b8-40d9-a94f-ba14118e00bc-kube-api-access-smfxj\") pod \"nmstate-metrics-54757c584b-6rnll\" (UID: \"2b36143a-20b8-40d9-a94f-ba14118e00bc\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.848895 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w"] Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918426 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0fb8bb1d-8d05-4b95-a466-40fabf706e11-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918489 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9n65\" (UniqueName: \"kubernetes.io/projected/d6be4693-290f-45f6-8783-4e28ab1e4578-kube-api-access-f9n65\") pod \"nmstate-webhook-8474b5b9d8-7n7pb\" (UID: \"d6be4693-290f-45f6-8783-4e28ab1e4578\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918515 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-dbus-socket\") pod \"nmstate-handler-jd8kn\" (UID: 
\"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918547 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-nmstate-lock\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918573 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fb8bb1d-8d05-4b95-a466-40fabf706e11-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918600 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpw2g\" (UniqueName: \"kubernetes.io/projected/0fb8bb1d-8d05-4b95-a466-40fabf706e11-kube-api-access-xpw2g\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918627 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d6be4693-290f-45f6-8783-4e28ab1e4578-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7n7pb\" (UID: \"d6be4693-290f-45f6-8783-4e28ab1e4578\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918643 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wngl6\" (UniqueName: \"kubernetes.io/projected/ab78e821-7f8a-43c5-a857-3694754330a1-kube-api-access-wngl6\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918676 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-ovs-socket\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.918773 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-ovs-socket\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.919144 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-nmstate-lock\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.919202 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/ab78e821-7f8a-43c5-a857-3694754330a1-dbus-socket\") pod \"nmstate-handler-jd8kn\" (UID: 
\"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.923328 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d6be4693-290f-45f6-8783-4e28ab1e4578-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7n7pb\" (UID: \"d6be4693-290f-45f6-8783-4e28ab1e4578\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.935865 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9n65\" (UniqueName: \"kubernetes.io/projected/d6be4693-290f-45f6-8783-4e28ab1e4578-kube-api-access-f9n65\") pod \"nmstate-webhook-8474b5b9d8-7n7pb\" (UID: \"d6be4693-290f-45f6-8783-4e28ab1e4578\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.937685 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wngl6\" (UniqueName: \"kubernetes.io/projected/ab78e821-7f8a-43c5-a857-3694754330a1-kube-api-access-wngl6\") pod \"nmstate-handler-jd8kn\" (UID: \"ab78e821-7f8a-43c5-a857-3694754330a1\") " pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.962546 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" Jan 30 11:06:06 crc kubenswrapper[4869]: I0130 11:06:06.975193 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.020247 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0fb8bb1d-8d05-4b95-a466-40fabf706e11-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.020338 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fb8bb1d-8d05-4b95-a466-40fabf706e11-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.020360 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpw2g\" (UniqueName: \"kubernetes.io/projected/0fb8bb1d-8d05-4b95-a466-40fabf706e11-kube-api-access-xpw2g\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:07 crc kubenswrapper[4869]: E0130 11:06:07.021085 4869 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Jan 30 11:06:07 crc kubenswrapper[4869]: E0130 11:06:07.021181 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0fb8bb1d-8d05-4b95-a466-40fabf706e11-plugin-serving-cert podName:0fb8bb1d-8d05-4b95-a466-40fabf706e11 nodeName:}" failed. No retries permitted until 2026-01-30 11:06:07.521163283 +0000 UTC m=+718.071039349 (durationBeforeRetry 500ms). 
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.021429 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0fb8bb1d-8d05-4b95-a466-40fabf706e11-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.039264 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7bb979c75c-jcnt5"]
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.039487 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-jd8kn"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.040177 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.046963 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpw2g\" (UniqueName: \"kubernetes.io/projected/0fb8bb1d-8d05-4b95-a466-40fabf706e11-kube-api-access-xpw2g\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.053159 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7bb979c75c-jcnt5"]
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.121171 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-trusted-ca-bundle\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.121508 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-oauth-config\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.121556 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-config\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.121626 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-service-ca\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.121654 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-oauth-serving-cert\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.121674 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-serving-cert\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.121777 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxw49\" (UniqueName: \"kubernetes.io/projected/9f154be6-056e-4128-bdcf-79ebc2a76b70-kube-api-access-rxw49\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.194196 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb"]
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.222732 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-oauth-config\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.222774 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-config\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.222849 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-service-ca\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.222905 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-oauth-serving-cert\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.222937 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-serving-cert\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.222985 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxw49\" (UniqueName: \"kubernetes.io/projected/9f154be6-056e-4128-bdcf-79ebc2a76b70-kube-api-access-rxw49\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5"
\"kubernetes.io/projected/9f154be6-056e-4128-bdcf-79ebc2a76b70-kube-api-access-rxw49\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.223016 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-trusted-ca-bundle\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.224579 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-trusted-ca-bundle\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.225915 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-oauth-serving-cert\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.226520 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-config\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.226545 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f154be6-056e-4128-bdcf-79ebc2a76b70-service-ca\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.234480 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-serving-cert\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.234956 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f154be6-056e-4128-bdcf-79ebc2a76b70-console-oauth-config\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.243474 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxw49\" (UniqueName: \"kubernetes.io/projected/9f154be6-056e-4128-bdcf-79ebc2a76b70-kube-api-access-rxw49\") pod \"console-7bb979c75c-jcnt5\" (UID: \"9f154be6-056e-4128-bdcf-79ebc2a76b70\") " pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.363272 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.449555 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-6rnll"] Jan 30 11:06:07 crc kubenswrapper[4869]: W0130 11:06:07.458452 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b36143a_20b8_40d9_a94f_ba14118e00bc.slice/crio-4ea1e7b264aff975023e35e35031f86a64cf1cb9e9e07484b775ec19923a7f9c WatchSource:0}: Error finding container 4ea1e7b264aff975023e35e35031f86a64cf1cb9e9e07484b775ec19923a7f9c: Status 404 returned error can't find the container with id 4ea1e7b264aff975023e35e35031f86a64cf1cb9e9e07484b775ec19923a7f9c Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.526693 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fb8bb1d-8d05-4b95-a466-40fabf706e11-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.529642 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fb8bb1d-8d05-4b95-a466-40fabf706e11-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-vpv9w\" (UID: \"0fb8bb1d-8d05-4b95-a466-40fabf706e11\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.766699 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7bb979c75c-jcnt5"] Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.776876 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.794214 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jd8kn" event={"ID":"ab78e821-7f8a-43c5-a857-3694754330a1","Type":"ContainerStarted","Data":"5e5729cd7189c76f9052fb001c8543e2368cf563bb987b9da7aca05ba2ca5f3a"} Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.797651 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7bb979c75c-jcnt5" event={"ID":"9f154be6-056e-4128-bdcf-79ebc2a76b70","Type":"ContainerStarted","Data":"f85aef52db6ed5e8fa044be17e0b396d58893852ec0bceba124f9a751e2ebe3f"} Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.798837 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" event={"ID":"d6be4693-290f-45f6-8783-4e28ab1e4578","Type":"ContainerStarted","Data":"172dc0f90e14bb9b2ce07635f8a864412ea3ca285cdabbcb9dbbf66b5abcb652"} Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.799751 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" event={"ID":"2b36143a-20b8-40d9-a94f-ba14118e00bc","Type":"ContainerStarted","Data":"4ea1e7b264aff975023e35e35031f86a64cf1cb9e9e07484b775ec19923a7f9c"} Jan 30 11:06:07 crc kubenswrapper[4869]: I0130 11:06:07.946278 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w"] Jan 30 11:06:07 crc kubenswrapper[4869]: W0130 11:06:07.955554 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fb8bb1d_8d05_4b95_a466_40fabf706e11.slice/crio-1ed180d3528ef6ebab37755f1b90686a86dea1c79db04e8a8e6a6e5e1464fc9a WatchSource:0}: Error finding container 1ed180d3528ef6ebab37755f1b90686a86dea1c79db04e8a8e6a6e5e1464fc9a: Status 404 returned error can't find the container with id 1ed180d3528ef6ebab37755f1b90686a86dea1c79db04e8a8e6a6e5e1464fc9a Jan 30 11:06:08 crc kubenswrapper[4869]: I0130 11:06:08.807984 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7bb979c75c-jcnt5" event={"ID":"9f154be6-056e-4128-bdcf-79ebc2a76b70","Type":"ContainerStarted","Data":"170735dace5525130b5e32170ace20b5789d00c950d2fbe74ff0cf4f71554825"} Jan 30 11:06:08 crc kubenswrapper[4869]: I0130 11:06:08.810351 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" event={"ID":"0fb8bb1d-8d05-4b95-a466-40fabf706e11","Type":"ContainerStarted","Data":"1ed180d3528ef6ebab37755f1b90686a86dea1c79db04e8a8e6a6e5e1464fc9a"} Jan 30 11:06:08 crc kubenswrapper[4869]: I0130 11:06:08.825843 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7bb979c75c-jcnt5" podStartSLOduration=1.825824638 podStartE2EDuration="1.825824638s" podCreationTimestamp="2026-01-30 11:06:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:06:08.822565685 +0000 UTC m=+719.372441761" watchObservedRunningTime="2026-01-30 11:06:08.825824638 +0000 UTC m=+719.375700704" Jan 30 11:06:09 crc kubenswrapper[4869]: I0130 11:06:09.822166 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" 
event={"ID":"2b36143a-20b8-40d9-a94f-ba14118e00bc","Type":"ContainerStarted","Data":"93023ea83313ae9a7a6cd475b7d29e708f5e3b3c010e71c8d338de248846241e"} Jan 30 11:06:09 crc kubenswrapper[4869]: I0130 11:06:09.824763 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jd8kn" event={"ID":"ab78e821-7f8a-43c5-a857-3694754330a1","Type":"ContainerStarted","Data":"4214959d57d67716c97c4868e2002985f8d25f3e82d098b280bc5bcd7b4a6684"} Jan 30 11:06:09 crc kubenswrapper[4869]: I0130 11:06:09.824824 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:09 crc kubenswrapper[4869]: I0130 11:06:09.827002 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" event={"ID":"d6be4693-290f-45f6-8783-4e28ab1e4578","Type":"ContainerStarted","Data":"2f24d66287f8823762eb0d37bdc4f10dd2067834d9c6782521bf4fb897292d5e"} Jan 30 11:06:09 crc kubenswrapper[4869]: I0130 11:06:09.827106 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:09 crc kubenswrapper[4869]: I0130 11:06:09.850317 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-jd8kn" podStartSLOduration=1.579730504 podStartE2EDuration="3.850294506s" podCreationTimestamp="2026-01-30 11:06:06 +0000 UTC" firstStartedPulling="2026-01-30 11:06:07.102320829 +0000 UTC m=+717.652196895" lastFinishedPulling="2026-01-30 11:06:09.372884841 +0000 UTC m=+719.922760897" observedRunningTime="2026-01-30 11:06:09.841961489 +0000 UTC m=+720.391837545" watchObservedRunningTime="2026-01-30 11:06:09.850294506 +0000 UTC m=+720.400170572" Jan 30 11:06:09 crc kubenswrapper[4869]: I0130 11:06:09.859209 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" podStartSLOduration=1.659175511 podStartE2EDuration="3.859190629s" podCreationTimestamp="2026-01-30 11:06:06 +0000 UTC" firstStartedPulling="2026-01-30 11:06:07.199634594 +0000 UTC m=+717.749510660" lastFinishedPulling="2026-01-30 11:06:09.399649702 +0000 UTC m=+719.949525778" observedRunningTime="2026-01-30 11:06:09.856266046 +0000 UTC m=+720.406142122" watchObservedRunningTime="2026-01-30 11:06:09.859190629 +0000 UTC m=+720.409066695" Jan 30 11:06:10 crc kubenswrapper[4869]: I0130 11:06:10.833967 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" event={"ID":"0fb8bb1d-8d05-4b95-a466-40fabf706e11","Type":"ContainerStarted","Data":"7efb957e5b8dd91c098afb0694c96352dd7ac93f6c73f789b542b38c89d3d928"} Jan 30 11:06:10 crc kubenswrapper[4869]: I0130 11:06:10.862001 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-vpv9w" podStartSLOduration=2.461010763 podStartE2EDuration="4.861979301s" podCreationTimestamp="2026-01-30 11:06:06 +0000 UTC" firstStartedPulling="2026-01-30 11:06:07.959840193 +0000 UTC m=+718.509716259" lastFinishedPulling="2026-01-30 11:06:10.360808731 +0000 UTC m=+720.910684797" observedRunningTime="2026-01-30 11:06:10.858209434 +0000 UTC m=+721.408085500" watchObservedRunningTime="2026-01-30 11:06:10.861979301 +0000 UTC m=+721.411855357" Jan 30 11:06:12 crc kubenswrapper[4869]: I0130 11:06:12.845945 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" event={"ID":"2b36143a-20b8-40d9-a94f-ba14118e00bc","Type":"ContainerStarted","Data":"9a314211f3214f1b09dc0c7de2776493890c9977dc19cc92363fe58f555d5289"} Jan 30 11:06:12 crc kubenswrapper[4869]: I0130 11:06:12.870638 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-6rnll" podStartSLOduration=2.468885576 podStartE2EDuration="6.870613051s" podCreationTimestamp="2026-01-30 11:06:06 +0000 UTC" firstStartedPulling="2026-01-30 11:06:07.464102858 +0000 UTC m=+718.013978924" lastFinishedPulling="2026-01-30 11:06:11.865830333 +0000 UTC m=+722.415706399" observedRunningTime="2026-01-30 11:06:12.864625021 +0000 UTC m=+723.414501107" watchObservedRunningTime="2026-01-30 11:06:12.870613051 +0000 UTC m=+723.420489117" Jan 30 11:06:17 crc kubenswrapper[4869]: I0130 11:06:17.060014 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-jd8kn" Jan 30 11:06:17 crc kubenswrapper[4869]: I0130 11:06:17.364115 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:17 crc kubenswrapper[4869]: I0130 11:06:17.364208 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:17 crc kubenswrapper[4869]: I0130 11:06:17.369493 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:17 crc kubenswrapper[4869]: I0130 11:06:17.876863 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7bb979c75c-jcnt5" Jan 30 11:06:17 crc kubenswrapper[4869]: I0130 11:06:17.926404 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z8qjp"] Jan 30 11:06:26 crc kubenswrapper[4869]: I0130 11:06:26.980965 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7n7pb" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.006194 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t"] Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.008032 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.010822 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.020241 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t"] Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.115613 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.115680 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff6fk\" (UniqueName: \"kubernetes.io/projected/d77c8e13-cf19-438b-9d62-575041c50699-kube-api-access-ff6fk\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.115825 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.216657 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.216784 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.216838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff6fk\" (UniqueName: \"kubernetes.io/projected/d77c8e13-cf19-438b-9d62-575041c50699-kube-api-access-ff6fk\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.217407 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.217407 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.240702 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff6fk\" (UniqueName: \"kubernetes.io/projected/d77c8e13-cf19-438b-9d62-575041c50699-kube-api-access-ff6fk\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.362746 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.554454 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t"] Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.982848 4869 generic.go:334] "Generic (PLEG): container finished" podID="d77c8e13-cf19-438b-9d62-575041c50699" containerID="26ae9306f5a759663d3823f1cc1a7b682b5b0b1eb773fcc2f71a7eb13aa5634f" exitCode=0 Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.982903 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" event={"ID":"d77c8e13-cf19-438b-9d62-575041c50699","Type":"ContainerDied","Data":"26ae9306f5a759663d3823f1cc1a7b682b5b0b1eb773fcc2f71a7eb13aa5634f"} Jan 30 11:06:37 crc kubenswrapper[4869]: I0130 11:06:37.983147 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" event={"ID":"d77c8e13-cf19-438b-9d62-575041c50699","Type":"ContainerStarted","Data":"7a9324cc812457638307b04d9da7c5ad085882f93d2f67dcc0666358e43e5393"} Jan 30 11:06:39 crc kubenswrapper[4869]: I0130 11:06:39.994639 4869 generic.go:334] "Generic (PLEG): container finished" podID="d77c8e13-cf19-438b-9d62-575041c50699" containerID="ade133eb8026a7c6e5cd884ad7e0f66cbf2fd0e35fba451d667357d2230f376b" exitCode=0 Jan 30 11:06:39 crc kubenswrapper[4869]: I0130 11:06:39.994744 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" event={"ID":"d77c8e13-cf19-438b-9d62-575041c50699","Type":"ContainerDied","Data":"ade133eb8026a7c6e5cd884ad7e0f66cbf2fd0e35fba451d667357d2230f376b"} Jan 30 11:06:41 crc kubenswrapper[4869]: I0130 11:06:41.002381 4869 generic.go:334] "Generic (PLEG): container finished" podID="d77c8e13-cf19-438b-9d62-575041c50699" containerID="950c1ab6560e74dea8aa8f8cdb23a50d9fa8ebf8833473791785e27b6097e507" exitCode=0 Jan 30 11:06:41 crc kubenswrapper[4869]: I0130 
Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.455523 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t"
Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.514626 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-bundle\") pod \"d77c8e13-cf19-438b-9d62-575041c50699\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") "
Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.514969 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff6fk\" (UniqueName: \"kubernetes.io/projected/d77c8e13-cf19-438b-9d62-575041c50699-kube-api-access-ff6fk\") pod \"d77c8e13-cf19-438b-9d62-575041c50699\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") "
Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.515075 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-util\") pod \"d77c8e13-cf19-438b-9d62-575041c50699\" (UID: \"d77c8e13-cf19-438b-9d62-575041c50699\") "
Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.516232 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-bundle" (OuterVolumeSpecName: "bundle") pod "d77c8e13-cf19-438b-9d62-575041c50699" (UID: "d77c8e13-cf19-438b-9d62-575041c50699"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.516635 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jprfl"] Jan 30 11:06:42 crc kubenswrapper[4869]: E0130 11:06:42.516892 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d77c8e13-cf19-438b-9d62-575041c50699" containerName="pull" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.516909 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d77c8e13-cf19-438b-9d62-575041c50699" containerName="pull" Jan 30 11:06:42 crc kubenswrapper[4869]: E0130 11:06:42.516920 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d77c8e13-cf19-438b-9d62-575041c50699" containerName="util" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.516926 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d77c8e13-cf19-438b-9d62-575041c50699" containerName="util" Jan 30 11:06:42 crc kubenswrapper[4869]: E0130 11:06:42.516943 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d77c8e13-cf19-438b-9d62-575041c50699" containerName="extract" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.516949 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d77c8e13-cf19-438b-9d62-575041c50699" containerName="extract" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.517043 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d77c8e13-cf19-438b-9d62-575041c50699" containerName="extract" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.517852 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.523672 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d77c8e13-cf19-438b-9d62-575041c50699-kube-api-access-ff6fk" (OuterVolumeSpecName: "kube-api-access-ff6fk") pod "d77c8e13-cf19-438b-9d62-575041c50699" (UID: "d77c8e13-cf19-438b-9d62-575041c50699"). InnerVolumeSpecName "kube-api-access-ff6fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.526603 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jprfl"] Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.544283 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-util" (OuterVolumeSpecName: "util") pod "d77c8e13-cf19-438b-9d62-575041c50699" (UID: "d77c8e13-cf19-438b-9d62-575041c50699"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.616554 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-catalog-content\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.616601 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lgtc\" (UniqueName: \"kubernetes.io/projected/b8eb7145-962c-4ac8-a415-206c7808b0f3-kube-api-access-7lgtc\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.616796 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-utilities\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.616920 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-util\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.616940 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77c8e13-cf19-438b-9d62-575041c50699-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.616955 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff6fk\" (UniqueName: \"kubernetes.io/projected/d77c8e13-cf19-438b-9d62-575041c50699-kube-api-access-ff6fk\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.718098 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-utilities\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.718167 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-catalog-content\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.718190 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lgtc\" (UniqueName: \"kubernetes.io/projected/b8eb7145-962c-4ac8-a415-206c7808b0f3-kube-api-access-7lgtc\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.718957 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-utilities\") pod \"redhat-operators-jprfl\" (UID: 
\"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.718973 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-catalog-content\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.735595 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lgtc\" (UniqueName: \"kubernetes.io/projected/b8eb7145-962c-4ac8-a415-206c7808b0f3-kube-api-access-7lgtc\") pod \"redhat-operators-jprfl\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") " pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.861151 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:42 crc kubenswrapper[4869]: I0130 11:06:42.966749 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-z8qjp" podUID="0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" containerName="console" containerID="cri-o://ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69" gracePeriod=15 Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.022187 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" event={"ID":"d77c8e13-cf19-438b-9d62-575041c50699","Type":"ContainerDied","Data":"7a9324cc812457638307b04d9da7c5ad085882f93d2f67dcc0666358e43e5393"} Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.022503 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a9324cc812457638307b04d9da7c5ad085882f93d2f67dcc0666358e43e5393" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.022362 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.265694 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jprfl"] Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.303947 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z8qjp_0f3523c0-5e3b-435a-b83d-83c3a0c4dca2/console/0.log" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.304017 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z8qjp" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.426576 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-oauth-config\") pod \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.427025 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msd2p\" (UniqueName: \"kubernetes.io/projected/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-kube-api-access-msd2p\") pod \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.427112 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-oauth-serving-cert\") pod \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.427196 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-service-ca\") pod \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.427276 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-serving-cert\") pod \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.427354 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-config\") pod \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.427429 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-trusted-ca-bundle\") pod \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\" (UID: \"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2\") " Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.428205 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" (UID: "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.428197 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-service-ca" (OuterVolumeSpecName: "service-ca") pod "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" (UID: "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.428533 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-config" (OuterVolumeSpecName: "console-config") pod "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" (UID: "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.428897 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" (UID: "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.432825 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" (UID: "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.433366 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-kube-api-access-msd2p" (OuterVolumeSpecName: "kube-api-access-msd2p") pod "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" (UID: "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2"). InnerVolumeSpecName "kube-api-access-msd2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.437920 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" (UID: "0f3523c0-5e3b-435a-b83d-83c3a0c4dca2"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.529161 4869 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.529200 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.529211 4869 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.529219 4869 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.529227 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.529235 4869 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:43 crc kubenswrapper[4869]: I0130 11:06:43.529243 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msd2p\" (UniqueName: \"kubernetes.io/projected/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2-kube-api-access-msd2p\") on node \"crc\" DevicePath \"\"" Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.027436 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z8qjp_0f3523c0-5e3b-435a-b83d-83c3a0c4dca2/console/0.log" Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.027475 4869 generic.go:334] "Generic (PLEG): container finished" podID="0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" containerID="ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69" exitCode=2 Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.027518 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z8qjp" event={"ID":"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2","Type":"ContainerDied","Data":"ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69"} Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.027542 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z8qjp" event={"ID":"0f3523c0-5e3b-435a-b83d-83c3a0c4dca2","Type":"ContainerDied","Data":"9a321dc364d7adc27a6a137eb2ad8d1f26e64e1e7cf8da8b45869e31ffe8835d"} Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.027557 4869 scope.go:117] "RemoveContainer" containerID="ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69" Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.027636 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z8qjp"
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.035025 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerID="2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d" exitCode=0
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.035076 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jprfl" event={"ID":"b8eb7145-962c-4ac8-a415-206c7808b0f3","Type":"ContainerDied","Data":"2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d"}
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.035107 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jprfl" event={"ID":"b8eb7145-962c-4ac8-a415-206c7808b0f3","Type":"ContainerStarted","Data":"7ea841edeb3dbfb02c7dc9021d91c44c679663fc75eaff0b0b15a216f30d053c"}
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.050746 4869 scope.go:117] "RemoveContainer" containerID="ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69"
Jan 30 11:06:44 crc kubenswrapper[4869]: E0130 11:06:44.053922 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69\": container with ID starting with ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69 not found: ID does not exist" containerID="ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69"
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.054177 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69"} err="failed to get container status \"ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69\": rpc error: code = NotFound desc = could not find container \"ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69\": container with ID starting with ef14f342453f2db2ad1d31614fd7aaa6acfbd1b699709829a0ab1b4c3f4d3a69 not found: ID does not exist"
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.064152 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z8qjp"]
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.068157 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-z8qjp"]
Jan 30 11:06:44 crc kubenswrapper[4869]: I0130 11:06:44.139246 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" path="/var/lib/kubelet/pods/0f3523c0-5e3b-435a-b83d-83c3a0c4dca2/volumes"
Jan 30 11:06:45 crc kubenswrapper[4869]: I0130 11:06:45.042669 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jprfl" event={"ID":"b8eb7145-962c-4ac8-a415-206c7808b0f3","Type":"ContainerStarted","Data":"f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910"}
Jan 30 11:06:45 crc kubenswrapper[4869]: I0130 11:06:45.130654 4869 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
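The RemoveContainer / "DeleteContainer returned error" pair above is the kubelet tolerating a race: by the time it asks CRI-O for the container's status, the container is already gone, so the gRPC NotFound is logged and dropped rather than retried. A minimal sketch of that pattern against the CRI API (the helper name and client wiring are ours, not the kubelet's):

package sketch

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

// RemoveIfPresent asks the runtime for the container's status and treats
// gRPC NotFound as success: a container that no longer exists needs no
// removal, so the error is reported and swallowed, as in the log above.
func RemoveIfPresent(ctx context.Context, rs runtimeapi.RuntimeServiceClient, id string) error {
	_, err := rs.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id})
	if status.Code(err) == codes.NotFound {
		fmt.Printf("container %s already gone, nothing to delete\n", id)
		return nil // log and move on rather than retry
	}
	if err != nil {
		return fmt.Errorf("failed to get container status %q: %w", id, err)
	}
	_, err = rs.RemoveContainer(ctx, &runtimeapi.RemoveContainerRequest{ContainerId: id})
	return err
}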
Jan 30 11:06:47 crc kubenswrapper[4869]: I0130 11:06:47.056037 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerID="f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910" exitCode=0
Jan 30 11:06:47 crc kubenswrapper[4869]: I0130 11:06:47.056083 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jprfl" event={"ID":"b8eb7145-962c-4ac8-a415-206c7808b0f3","Type":"ContainerDied","Data":"f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910"}
Jan 30 11:06:48 crc kubenswrapper[4869]: I0130 11:06:48.062889 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jprfl" event={"ID":"b8eb7145-962c-4ac8-a415-206c7808b0f3","Type":"ContainerStarted","Data":"f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d"}
Jan 30 11:06:48 crc kubenswrapper[4869]: I0130 11:06:48.085792 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jprfl" podStartSLOduration=2.662012239 podStartE2EDuration="6.085776448s" podCreationTimestamp="2026-01-30 11:06:42 +0000 UTC" firstStartedPulling="2026-01-30 11:06:44.036411764 +0000 UTC m=+754.586287830" lastFinishedPulling="2026-01-30 11:06:47.460175973 +0000 UTC m=+758.010052039" observedRunningTime="2026-01-30 11:06:48.083181785 +0000 UTC m=+758.633057871" watchObservedRunningTime="2026-01-30 11:06:48.085776448 +0000 UTC m=+758.635652514"
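The "Observed pod startup duration" record above is internally consistent: podStartSLOduration is the end-to-end duration (watch-observed running time minus podCreationTimestamp) minus the image-pull window (lastFinishedPulling minus firstStartedPulling), so slow pulls do not count against the startup SLO number. Checking the arithmetic with the timestamps copied from the record (rewritten in RFC 3339 form):

package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse(time.RFC3339Nano, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2026-01-30T11:06:42Z")              // podCreationTimestamp
	firstPull := parse("2026-01-30T11:06:44.036411764Z")  // firstStartedPulling
	lastPull := parse("2026-01-30T11:06:47.460175973Z")   // lastFinishedPulling
	running := parse("2026-01-30T11:06:48.085776448Z")    // watchObservedRunningTime

	e2e := running.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // pull time excluded from the SLO figure
	fmt.Println(e2e, slo)                // 6.085776448s 2.662012239s, matching the record
}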
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.840241 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"]
Jan 30 11:06:51 crc kubenswrapper[4869]: E0130 11:06:51.840767 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" containerName="console"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.840780 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" containerName="console"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.840895 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f3523c0-5e3b-435a-b83d-83c3a0c4dca2" containerName="console"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.841282 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.843439 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.843773 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.844014 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.844235 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.845359 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-bcvgx"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.870532 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"]
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.937394 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e2c40c0-e880-4c08-bb45-037f69a35fa9-webhook-cert\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.937554 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e2c40c0-e880-4c08-bb45-037f69a35fa9-apiservice-cert\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"
Jan 30 11:06:51 crc kubenswrapper[4869]: I0130 11:06:51.937662 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b65j\" (UniqueName: \"kubernetes.io/projected/0e2c40c0-e880-4c08-bb45-037f69a35fa9-kube-api-access-5b65j\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"
Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.038814 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e2c40c0-e880-4c08-bb45-037f69a35fa9-apiservice-cert\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"
Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.038885 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b65j\" (UniqueName: \"kubernetes.io/projected/0e2c40c0-e880-4c08-bb45-037f69a35fa9-kube-api-access-5b65j\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"
Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.038930 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e2c40c0-e880-4c08-bb45-037f69a35fa9-webhook-cert\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.045216 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e2c40c0-e880-4c08-bb45-037f69a35fa9-webhook-cert\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.045682 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e2c40c0-e880-4c08-bb45-037f69a35fa9-apiservice-cert\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.065785 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b65j\" (UniqueName: \"kubernetes.io/projected/0e2c40c0-e880-4c08-bb45-037f69a35fa9-kube-api-access-5b65j\") pod \"metallb-operator-controller-manager-b456d8f47-lgc55\" (UID: \"0e2c40c0-e880-4c08-bb45-037f69a35fa9\") " pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.113395 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw"] Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.114343 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.117762 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.117818 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.117783 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-vrldm" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.140006 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw"] Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.169291 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.270437 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-webhook-cert\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.270535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-apiservice-cert\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.270777 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69zng\" (UniqueName: \"kubernetes.io/projected/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-kube-api-access-69zng\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.372525 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-apiservice-cert\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.372574 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69zng\" (UniqueName: \"kubernetes.io/projected/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-kube-api-access-69zng\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.372644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-webhook-cert\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.377254 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-webhook-cert\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.377315 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-apiservice-cert\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " 
pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.393914 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69zng\" (UniqueName: \"kubernetes.io/projected/e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4-kube-api-access-69zng\") pod \"metallb-operator-webhook-server-67776d4d6b-k4bgw\" (UID: \"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4\") " pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.428695 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.507532 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"] Jan 30 11:06:52 crc kubenswrapper[4869]: W0130 11:06:52.529124 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e2c40c0_e880_4c08_bb45_037f69a35fa9.slice/crio-5823a4968976d38750c2bcd5a113a8ef400fcb2a5b52e1aceed6fe3638144d87 WatchSource:0}: Error finding container 5823a4968976d38750c2bcd5a113a8ef400fcb2a5b52e1aceed6fe3638144d87: Status 404 returned error can't find the container with id 5823a4968976d38750c2bcd5a113a8ef400fcb2a5b52e1aceed6fe3638144d87 Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.784293 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw"] Jan 30 11:06:52 crc kubenswrapper[4869]: W0130 11:06:52.801439 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3c75ec9_dae6_418e_8ea0_3d0ab1c8d1a4.slice/crio-e26e269f3d59e3a9fd972fc9f6cd28045bf80cbb4b8f23477ab351f981a959c9 WatchSource:0}: Error finding container e26e269f3d59e3a9fd972fc9f6cd28045bf80cbb4b8f23477ab351f981a959c9: Status 404 returned error can't find the container with id e26e269f3d59e3a9fd972fc9f6cd28045bf80cbb4b8f23477ab351f981a959c9 Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.861823 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:52 crc kubenswrapper[4869]: I0130 11:06:52.861924 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jprfl" Jan 30 11:06:53 crc kubenswrapper[4869]: I0130 11:06:53.091618 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" event={"ID":"0e2c40c0-e880-4c08-bb45-037f69a35fa9","Type":"ContainerStarted","Data":"5823a4968976d38750c2bcd5a113a8ef400fcb2a5b52e1aceed6fe3638144d87"} Jan 30 11:06:53 crc kubenswrapper[4869]: I0130 11:06:53.092863 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" event={"ID":"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4","Type":"ContainerStarted","Data":"e26e269f3d59e3a9fd972fc9f6cd28045bf80cbb4b8f23477ab351f981a959c9"} Jan 30 11:06:53 crc kubenswrapper[4869]: I0130 11:06:53.980114 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jprfl" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="registry-server" probeResult="failure" output=< Jan 30 11:06:53 crc kubenswrapper[4869]: 
timeout: failed to connect service ":50051" within 1s
Jan 30 11:06:53 crc kubenswrapper[4869]: >
Jan 30 11:06:59 crc kubenswrapper[4869]: I0130 11:06:59.165443 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" event={"ID":"0e2c40c0-e880-4c08-bb45-037f69a35fa9","Type":"ContainerStarted","Data":"ab1294cc890dd5ca0804fdddbb0ddedfbba419f42afc44a8c645fcaf1cda73cf"}
Jan 30 11:06:59 crc kubenswrapper[4869]: I0130 11:06:59.166034 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55"
Jan 30 11:06:59 crc kubenswrapper[4869]: I0130 11:06:59.167135 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" event={"ID":"e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4","Type":"ContainerStarted","Data":"c693ca7e3fb05838e6040ec9c583f6ad9d2c1adafe9dbbcf9717680203996bfc"}
Jan 30 11:06:59 crc kubenswrapper[4869]: I0130 11:06:59.167309 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw"
Jan 30 11:06:59 crc kubenswrapper[4869]: I0130 11:06:59.201699 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" podStartSLOduration=1.944930909 podStartE2EDuration="8.201681261s" podCreationTimestamp="2026-01-30 11:06:51 +0000 UTC" firstStartedPulling="2026-01-30 11:06:52.540525619 +0000 UTC m=+763.090401685" lastFinishedPulling="2026-01-30 11:06:58.797275961 +0000 UTC m=+769.347152037" observedRunningTime="2026-01-30 11:06:59.186249892 +0000 UTC m=+769.736125958" watchObservedRunningTime="2026-01-30 11:06:59.201681261 +0000 UTC m=+769.751557327"
Jan 30 11:06:59 crc kubenswrapper[4869]: I0130 11:06:59.203896 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" podStartSLOduration=1.192678768 podStartE2EDuration="7.203886383s" podCreationTimestamp="2026-01-30 11:06:52 +0000 UTC" firstStartedPulling="2026-01-30 11:06:52.805787286 +0000 UTC m=+763.355663352" lastFinishedPulling="2026-01-30 11:06:58.816994901 +0000 UTC m=+769.366870967" observedRunningTime="2026-01-30 11:06:59.200991301 +0000 UTC m=+769.750867387" watchObservedRunningTime="2026-01-30 11:06:59.203886383 +0000 UTC m=+769.753762479"
Jan 30 11:07:02 crc kubenswrapper[4869]: I0130 11:07:02.914843 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jprfl"
Jan 30 11:07:02 crc kubenswrapper[4869]: I0130 11:07:02.956832 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jprfl"
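That closes the loop on the startup probe: at 11:06:53 the registry-server's gRPC port was not yet accepting connections, so the probe failed against its one-second budget; by 11:07:02 the same probe reports "started" and readiness follows. The failing check behaves roughly like a connect-with-deadline. A sketch under that assumption (the catalog image actually ships a gRPC health-probe binary; the loopback address is ours):

package main

import (
	"fmt"
	"net"
	"time"
)

// probeOnce succeeds only if addr accepts a TCP connection within timeout,
// which is the failure mode reported in the probe output above.
func probeOnce(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within %s", addr, timeout)
	}
	return conn.Close()
}

func main() {
	if err := probeOnce("127.0.0.1:50051", time.Second); err != nil {
		fmt.Println(err) // same shape as the probe output in the log
	}
}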
Jan 30 11:07:03 crc kubenswrapper[4869]: I0130 11:07:03.145798 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jprfl"]
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.192339 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jprfl" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="registry-server" containerID="cri-o://f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d" gracePeriod=2
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.553298 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jprfl"
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.659211 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-catalog-content\") pod \"b8eb7145-962c-4ac8-a415-206c7808b0f3\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") "
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.659285 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lgtc\" (UniqueName: \"kubernetes.io/projected/b8eb7145-962c-4ac8-a415-206c7808b0f3-kube-api-access-7lgtc\") pod \"b8eb7145-962c-4ac8-a415-206c7808b0f3\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") "
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.659320 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-utilities\") pod \"b8eb7145-962c-4ac8-a415-206c7808b0f3\" (UID: \"b8eb7145-962c-4ac8-a415-206c7808b0f3\") "
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.660138 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-utilities" (OuterVolumeSpecName: "utilities") pod "b8eb7145-962c-4ac8-a415-206c7808b0f3" (UID: "b8eb7145-962c-4ac8-a415-206c7808b0f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.664586 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8eb7145-962c-4ac8-a415-206c7808b0f3-kube-api-access-7lgtc" (OuterVolumeSpecName: "kube-api-access-7lgtc") pod "b8eb7145-962c-4ac8-a415-206c7808b0f3" (UID: "b8eb7145-962c-4ac8-a415-206c7808b0f3"). InnerVolumeSpecName "kube-api-access-7lgtc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.760585 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lgtc\" (UniqueName: \"kubernetes.io/projected/b8eb7145-962c-4ac8-a415-206c7808b0f3-kube-api-access-7lgtc\") on node \"crc\" DevicePath \"\""
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.760633 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-utilities\") on node \"crc\" DevicePath \"\""
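The UnmountVolume / TearDown / "Volume detached" triples above are one pass of the kubelet's volume reconciler: after the SyncLoop DELETE, the pod's volumes are still in the actual (mounted) state but no longer in the desired state, so each one is torn down and then reported detached. The control flow reduces to a set difference; a toy sketch with our own names, not the kubelet's types:

package main

import "fmt"

func main() {
	// After SyncLoop DELETE, no volumes are desired for the pod any more.
	desired := map[string]bool{}
	actual := map[string]bool{ // still mounted from the running pod
		"catalog-content":       true,
		"kube-api-access-7lgtc": true,
		"utilities":             true,
	}
	for vol := range actual {
		if !desired[vol] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", vol)
			// ...plugin-specific TearDown would run here...
			delete(actual, vol)
			fmt.Printf("Volume detached for volume %q\n", vol)
		}
	}
}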
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.772153 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8eb7145-962c-4ac8-a415-206c7808b0f3" (UID: "b8eb7145-962c-4ac8-a415-206c7808b0f3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:07:04 crc kubenswrapper[4869]: I0130 11:07:04.862372 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7145-962c-4ac8-a415-206c7808b0f3-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.200348 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerID="f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d" exitCode=0
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.200387 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jprfl"
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.200435 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jprfl" event={"ID":"b8eb7145-962c-4ac8-a415-206c7808b0f3","Type":"ContainerDied","Data":"f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d"}
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.201185 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jprfl" event={"ID":"b8eb7145-962c-4ac8-a415-206c7808b0f3","Type":"ContainerDied","Data":"7ea841edeb3dbfb02c7dc9021d91c44c679663fc75eaff0b0b15a216f30d053c"}
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.201208 4869 scope.go:117] "RemoveContainer" containerID="f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d"
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.217030 4869 scope.go:117] "RemoveContainer" containerID="f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910"
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.226071 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jprfl"]
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.233969 4869 scope.go:117] "RemoveContainer" containerID="2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d"
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.241238 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jprfl"]
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.268095 4869 scope.go:117] "RemoveContainer" containerID="f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d"
Jan 30 11:07:05 crc kubenswrapper[4869]: E0130 11:07:05.269722 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d\": container with ID starting with f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d not found: ID does not exist" containerID="f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d"
Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.269770 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d"} err="failed to get container status \"f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d\": rpc error: code = NotFound desc = could not find container \"f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d\": container with ID starting with f683a1e8f66b468f87cd782869b6a750bae91c86c820dea4e0d828f0753dc70d not found: ID does not exist"
Jan 30 11:07:05 crc 
kubenswrapper[4869]: I0130 11:07:05.269796 4869 scope.go:117] "RemoveContainer" containerID="f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910" Jan 30 11:07:05 crc kubenswrapper[4869]: E0130 11:07:05.270201 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910\": container with ID starting with f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910 not found: ID does not exist" containerID="f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910" Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.270280 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910"} err="failed to get container status \"f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910\": rpc error: code = NotFound desc = could not find container \"f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910\": container with ID starting with f51484a8e3a7483bc20dffddfbc6dfe9185267f63662fda009249dcb07bfd910 not found: ID does not exist" Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.270348 4869 scope.go:117] "RemoveContainer" containerID="2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d" Jan 30 11:07:05 crc kubenswrapper[4869]: E0130 11:07:05.270866 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d\": container with ID starting with 2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d not found: ID does not exist" containerID="2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d" Jan 30 11:07:05 crc kubenswrapper[4869]: I0130 11:07:05.270910 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d"} err="failed to get container status \"2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d\": rpc error: code = NotFound desc = could not find container \"2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d\": container with ID starting with 2eae1aedce22aeab95d199de1f7de32399fca19791577f5517d56a39c1814d7d not found: ID does not exist" Jan 30 11:07:06 crc kubenswrapper[4869]: I0130 11:07:06.142316 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" path="/var/lib/kubelet/pods/b8eb7145-962c-4ac8-a415-206c7808b0f3/volumes" Jan 30 11:07:12 crc kubenswrapper[4869]: I0130 11:07:12.433043 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-67776d4d6b-k4bgw" Jan 30 11:07:21 crc kubenswrapper[4869]: I0130 11:07:21.769678 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:07:21 crc kubenswrapper[4869]: I0130 11:07:21.770401 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.171865 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-b456d8f47-lgc55" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.821302 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp"] Jan 30 11:07:32 crc kubenswrapper[4869]: E0130 11:07:32.821558 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="extract-content" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.821579 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="extract-content" Jan 30 11:07:32 crc kubenswrapper[4869]: E0130 11:07:32.821589 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="registry-server" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.821599 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="registry-server" Jan 30 11:07:32 crc kubenswrapper[4869]: E0130 11:07:32.821610 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="extract-utilities" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.821617 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="extract-utilities" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.821758 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8eb7145-962c-4ac8-a415-206c7808b0f3" containerName="registry-server" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.822205 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.827196 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.827633 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-lwsmf" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.837086 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-v6jfd"] Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.843214 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.846363 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp"] Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.853132 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.853523 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886226 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50962bee-f856-4fd9-95f5-4b697b0212f2-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-fwqrp\" (UID: \"50962bee-f856-4fd9-95f5-4b697b0212f2\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886287 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-startup\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886312 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886334 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics-certs\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886356 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-sockets\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886377 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7xzr\" (UniqueName: \"kubernetes.io/projected/50962bee-f856-4fd9-95f5-4b697b0212f2-kube-api-access-s7xzr\") pod \"frr-k8s-webhook-server-7df86c4f6c-fwqrp\" (UID: \"50962bee-f856-4fd9-95f5-4b697b0212f2\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886409 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-conf\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886426 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfkzq\" (UniqueName: 
\"kubernetes.io/projected/9a5abb53-8c8c-4293-a0ac-594d43f9b703-kube-api-access-lfkzq\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.886450 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-reloader\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.924952 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-tbmhh"] Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.927497 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-tbmhh" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.930595 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.930637 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.931267 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.931685 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-z4jsq" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.946792 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-v8k8c"] Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.950041 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.952580 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.955284 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-v8k8c"] Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.989782 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrbwq\" (UniqueName: \"kubernetes.io/projected/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-kube-api-access-wrbwq\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.989859 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrfpj\" (UniqueName: \"kubernetes.io/projected/4879f7b2-b049-4a47-8d11-0868667299a6-kube-api-access-mrfpj\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.989910 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50962bee-f856-4fd9-95f5-4b697b0212f2-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-fwqrp\" (UID: \"50962bee-f856-4fd9-95f5-4b697b0212f2\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.989952 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-startup\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.989985 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990012 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metallb-excludel2\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990039 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics-certs\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990067 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-sockets\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990092 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s7xzr\" (UniqueName: \"kubernetes.io/projected/50962bee-f856-4fd9-95f5-4b697b0212f2-kube-api-access-s7xzr\") pod \"frr-k8s-webhook-server-7df86c4f6c-fwqrp\" (UID: \"50962bee-f856-4fd9-95f5-4b697b0212f2\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990124 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990151 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-cert\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-conf\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990211 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfkzq\" (UniqueName: \"kubernetes.io/projected/9a5abb53-8c8c-4293-a0ac-594d43f9b703-kube-api-access-lfkzq\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990240 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-reloader\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990297 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metrics-certs\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.990326 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-metrics-certs\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.991042 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-sockets\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.991334 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-startup\") 
pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.991649 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: E0130 11:07:32.990119 4869 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Jan 30 11:07:32 crc kubenswrapper[4869]: E0130 11:07:32.991777 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50962bee-f856-4fd9-95f5-4b697b0212f2-cert podName:50962bee-f856-4fd9-95f5-4b697b0212f2 nodeName:}" failed. No retries permitted until 2026-01-30 11:07:33.491752928 +0000 UTC m=+804.041629204 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50962bee-f856-4fd9-95f5-4b697b0212f2-cert") pod "frr-k8s-webhook-server-7df86c4f6c-fwqrp" (UID: "50962bee-f856-4fd9-95f5-4b697b0212f2") : secret "frr-k8s-webhook-server-cert" not found Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.991859 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-frr-conf\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: I0130 11:07:32.991893 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9a5abb53-8c8c-4293-a0ac-594d43f9b703-reloader\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:32 crc kubenswrapper[4869]: E0130 11:07:32.990558 4869 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Jan 30 11:07:32 crc kubenswrapper[4869]: E0130 11:07:32.992059 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics-certs podName:9a5abb53-8c8c-4293-a0ac-594d43f9b703 nodeName:}" failed. No retries permitted until 2026-01-30 11:07:33.492025776 +0000 UTC m=+804.041901842 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics-certs") pod "frr-k8s-v6jfd" (UID: "9a5abb53-8c8c-4293-a0ac-594d43f9b703") : secret "frr-k8s-certs-secret" not found Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.013330 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7xzr\" (UniqueName: \"kubernetes.io/projected/50962bee-f856-4fd9-95f5-4b697b0212f2-kube-api-access-s7xzr\") pod \"frr-k8s-webhook-server-7df86c4f6c-fwqrp\" (UID: \"50962bee-f856-4fd9-95f5-4b697b0212f2\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.017434 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfkzq\" (UniqueName: \"kubernetes.io/projected/9a5abb53-8c8c-4293-a0ac-594d43f9b703-kube-api-access-lfkzq\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.092324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metrics-certs\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.092397 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-metrics-certs\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.092431 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrbwq\" (UniqueName: \"kubernetes.io/projected/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-kube-api-access-wrbwq\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.092453 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrfpj\" (UniqueName: \"kubernetes.io/projected/4879f7b2-b049-4a47-8d11-0868667299a6-kube-api-access-mrfpj\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.092520 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metallb-excludel2\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.092555 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.092571 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-cert\") pod 
\"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.092581 4869 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.092691 4869 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.092719 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metrics-certs podName:a8bd3be4-3c24-4856-a0bb-3efe3d9f716f nodeName:}" failed. No retries permitted until 2026-01-30 11:07:33.592671895 +0000 UTC m=+804.142547961 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metrics-certs") pod "speaker-tbmhh" (UID: "a8bd3be4-3c24-4856-a0bb-3efe3d9f716f") : secret "speaker-certs-secret" not found Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.092871 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-metrics-certs podName:4879f7b2-b049-4a47-8d11-0868667299a6 nodeName:}" failed. No retries permitted until 2026-01-30 11:07:33.592825229 +0000 UTC m=+804.142701295 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-metrics-certs") pod "controller-6968d8fdc4-v8k8c" (UID: "4879f7b2-b049-4a47-8d11-0868667299a6") : secret "controller-certs-secret" not found Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.093041 4869 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.093081 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist podName:a8bd3be4-3c24-4856-a0bb-3efe3d9f716f nodeName:}" failed. No retries permitted until 2026-01-30 11:07:33.593073146 +0000 UTC m=+804.142949212 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist") pod "speaker-tbmhh" (UID: "a8bd3be4-3c24-4856-a0bb-3efe3d9f716f") : secret "metallb-memberlist" not found Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.093481 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metallb-excludel2\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.097376 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.109039 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-cert\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.116135 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrfpj\" (UniqueName: \"kubernetes.io/projected/4879f7b2-b049-4a47-8d11-0868667299a6-kube-api-access-mrfpj\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.117507 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrbwq\" (UniqueName: \"kubernetes.io/projected/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-kube-api-access-wrbwq\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.498140 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50962bee-f856-4fd9-95f5-4b697b0212f2-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-fwqrp\" (UID: \"50962bee-f856-4fd9-95f5-4b697b0212f2\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.498213 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics-certs\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.502135 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a5abb53-8c8c-4293-a0ac-594d43f9b703-metrics-certs\") pod \"frr-k8s-v6jfd\" (UID: \"9a5abb53-8c8c-4293-a0ac-594d43f9b703\") " pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.502809 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50962bee-f856-4fd9-95f5-4b697b0212f2-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-fwqrp\" (UID: \"50962bee-f856-4fd9-95f5-4b697b0212f2\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.598969 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" 
(UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.599061 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metrics-certs\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.599087 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-metrics-certs\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.599176 4869 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 30 11:07:33 crc kubenswrapper[4869]: E0130 11:07:33.599260 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist podName:a8bd3be4-3c24-4856-a0bb-3efe3d9f716f nodeName:}" failed. No retries permitted until 2026-01-30 11:07:34.599238387 +0000 UTC m=+805.149114503 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist") pod "speaker-tbmhh" (UID: "a8bd3be4-3c24-4856-a0bb-3efe3d9f716f") : secret "metallb-memberlist" not found Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.602686 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-metrics-certs\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.602832 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4879f7b2-b049-4a47-8d11-0868667299a6-metrics-certs\") pod \"controller-6968d8fdc4-v8k8c\" (UID: \"4879f7b2-b049-4a47-8d11-0868667299a6\") " pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.754643 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.794685 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:33 crc kubenswrapper[4869]: I0130 11:07:33.875811 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.092815 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-v8k8c"] Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.168233 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp"] Jan 30 11:07:34 crc kubenswrapper[4869]: W0130 11:07:34.178490 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50962bee_f856_4fd9_95f5_4b697b0212f2.slice/crio-b0f3a17d5e2c7eeebf34aa8a624f657a6d7ac280370a48aa12d27bfcc2b2bb8d WatchSource:0}: Error finding container b0f3a17d5e2c7eeebf34aa8a624f657a6d7ac280370a48aa12d27bfcc2b2bb8d: Status 404 returned error can't find the container with id b0f3a17d5e2c7eeebf34aa8a624f657a6d7ac280370a48aa12d27bfcc2b2bb8d Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.357888 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerStarted","Data":"cd56e9a4d7098a6cfc1335f18f0fd01b816912c0edb7dd41ff168f62279a5821"} Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.359008 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" event={"ID":"50962bee-f856-4fd9-95f5-4b697b0212f2","Type":"ContainerStarted","Data":"b0f3a17d5e2c7eeebf34aa8a624f657a6d7ac280370a48aa12d27bfcc2b2bb8d"} Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.360469 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-v8k8c" event={"ID":"4879f7b2-b049-4a47-8d11-0868667299a6","Type":"ContainerStarted","Data":"420300fdc2e6c1b40417e4585c93de8b898e01f77155d19dde1583a8fe121e04"} Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.360501 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-v8k8c" event={"ID":"4879f7b2-b049-4a47-8d11-0868667299a6","Type":"ContainerStarted","Data":"19eb4d951ba03852004962df67fd8ba524dd5bea8f04bc96ef4ada22e3eb3382"} Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.360514 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-v8k8c" event={"ID":"4879f7b2-b049-4a47-8d11-0868667299a6","Type":"ContainerStarted","Data":"673ff554ec97f0967d6e50cdf851eccc3021a15473d06250a5ba7eb388a5dd79"} Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.360678 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.377543 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-v8k8c" podStartSLOduration=2.377527331 podStartE2EDuration="2.377527331s" podCreationTimestamp="2026-01-30 11:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:07:34.375246306 +0000 UTC m=+804.925122372" watchObservedRunningTime="2026-01-30 11:07:34.377527331 +0000 UTC m=+804.927403397" Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.611406 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist\") 
pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.616836 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd3be4-3c24-4856-a0bb-3efe3d9f716f-memberlist\") pod \"speaker-tbmhh\" (UID: \"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f\") " pod="metallb-system/speaker-tbmhh" Jan 30 11:07:34 crc kubenswrapper[4869]: I0130 11:07:34.746154 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-tbmhh" Jan 30 11:07:34 crc kubenswrapper[4869]: W0130 11:07:34.771615 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8bd3be4_3c24_4856_a0bb_3efe3d9f716f.slice/crio-28f85af78146e9d6cb7b16237937daf8c9991db4fbb8acd3914ddb4ce1979ec9 WatchSource:0}: Error finding container 28f85af78146e9d6cb7b16237937daf8c9991db4fbb8acd3914ddb4ce1979ec9: Status 404 returned error can't find the container with id 28f85af78146e9d6cb7b16237937daf8c9991db4fbb8acd3914ddb4ce1979ec9 Jan 30 11:07:35 crc kubenswrapper[4869]: I0130 11:07:35.378299 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tbmhh" event={"ID":"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f","Type":"ContainerStarted","Data":"7f0c00b571875cd90462fc51f18abf3f9d536ac446e84e586713aa40a60f3165"} Jan 30 11:07:35 crc kubenswrapper[4869]: I0130 11:07:35.378361 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tbmhh" event={"ID":"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f","Type":"ContainerStarted","Data":"723a66a845f5ef49252566706578bbdf7be128849f920ffafca9a8850e855307"} Jan 30 11:07:35 crc kubenswrapper[4869]: I0130 11:07:35.378375 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tbmhh" event={"ID":"a8bd3be4-3c24-4856-a0bb-3efe3d9f716f","Type":"ContainerStarted","Data":"28f85af78146e9d6cb7b16237937daf8c9991db4fbb8acd3914ddb4ce1979ec9"} Jan 30 11:07:35 crc kubenswrapper[4869]: I0130 11:07:35.378810 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-tbmhh" Jan 30 11:07:35 crc kubenswrapper[4869]: I0130 11:07:35.404824 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-tbmhh" podStartSLOduration=3.4048032360000002 podStartE2EDuration="3.404803236s" podCreationTimestamp="2026-01-30 11:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:07:35.400762872 +0000 UTC m=+805.950638958" watchObservedRunningTime="2026-01-30 11:07:35.404803236 +0000 UTC m=+805.954679302" Jan 30 11:07:42 crc kubenswrapper[4869]: I0130 11:07:42.431086 4869 generic.go:334] "Generic (PLEG): container finished" podID="9a5abb53-8c8c-4293-a0ac-594d43f9b703" containerID="15873f380a3c6d01024538d194cfb83865d631e596fca8e5c5c6ebbe4a477d58" exitCode=0 Jan 30 11:07:42 crc kubenswrapper[4869]: I0130 11:07:42.431145 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerDied","Data":"15873f380a3c6d01024538d194cfb83865d631e596fca8e5c5c6ebbe4a477d58"} Jan 30 11:07:42 crc kubenswrapper[4869]: I0130 11:07:42.433343 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" 
event={"ID":"50962bee-f856-4fd9-95f5-4b697b0212f2","Type":"ContainerStarted","Data":"f4e6259c16635b3235d6916ca82c999da1ca7658786c18272a5d510ca4695f26"} Jan 30 11:07:42 crc kubenswrapper[4869]: I0130 11:07:42.434273 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:42 crc kubenswrapper[4869]: I0130 11:07:42.468516 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" podStartSLOduration=2.491662313 podStartE2EDuration="10.468496326s" podCreationTimestamp="2026-01-30 11:07:32 +0000 UTC" firstStartedPulling="2026-01-30 11:07:34.180228135 +0000 UTC m=+804.730104201" lastFinishedPulling="2026-01-30 11:07:42.157062148 +0000 UTC m=+812.706938214" observedRunningTime="2026-01-30 11:07:42.461546629 +0000 UTC m=+813.011422715" watchObservedRunningTime="2026-01-30 11:07:42.468496326 +0000 UTC m=+813.018372392" Jan 30 11:07:43 crc kubenswrapper[4869]: I0130 11:07:43.441979 4869 generic.go:334] "Generic (PLEG): container finished" podID="9a5abb53-8c8c-4293-a0ac-594d43f9b703" containerID="6dc0669aad6625a86a1f6ee8c2ea594d4df67fd0d737bcb5ea6ee8489b791da4" exitCode=0 Jan 30 11:07:43 crc kubenswrapper[4869]: I0130 11:07:43.442042 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerDied","Data":"6dc0669aad6625a86a1f6ee8c2ea594d4df67fd0d737bcb5ea6ee8489b791da4"} Jan 30 11:07:44 crc kubenswrapper[4869]: I0130 11:07:44.450026 4869 generic.go:334] "Generic (PLEG): container finished" podID="9a5abb53-8c8c-4293-a0ac-594d43f9b703" containerID="59badfefeff5bef35b6503dc6067e60d3176a6861bc6576e9c1abb7f7ea48fb2" exitCode=0 Jan 30 11:07:44 crc kubenswrapper[4869]: I0130 11:07:44.450143 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerDied","Data":"59badfefeff5bef35b6503dc6067e60d3176a6861bc6576e9c1abb7f7ea48fb2"} Jan 30 11:07:44 crc kubenswrapper[4869]: I0130 11:07:44.750373 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-tbmhh" Jan 30 11:07:45 crc kubenswrapper[4869]: I0130 11:07:45.468651 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerStarted","Data":"dfc3fb98596b94647554384fc14c9bea3491a3e214b60a563e3a5df27c20231d"} Jan 30 11:07:45 crc kubenswrapper[4869]: I0130 11:07:45.469042 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerStarted","Data":"79c5dc31d993a13aa127914c7a48c9eba644fb3b58469a352767efa442283391"} Jan 30 11:07:45 crc kubenswrapper[4869]: I0130 11:07:45.469056 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerStarted","Data":"5a2c4de4ef41228a319ad3e3e713f60e4282fcac6c61f50e49119c9c2aede2e6"} Jan 30 11:07:45 crc kubenswrapper[4869]: I0130 11:07:45.469068 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerStarted","Data":"31d3ecf5c4bcede0ab8b070236ea4c738856b606edf4b7c15b2bc8561006a0d7"} Jan 30 11:07:45 crc kubenswrapper[4869]: I0130 11:07:45.469088 
4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerStarted","Data":"795677110d5e4564ea30cbde27f71970e2f87fb08ff71f527ea03ef4cc21fc3d"} Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.386610 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4"] Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.387759 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.390636 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.397821 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4"] Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.479836 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-v6jfd" event={"ID":"9a5abb53-8c8c-4293-a0ac-594d43f9b703","Type":"ContainerStarted","Data":"efeebd9e9b052fd3c2781547a3706e0f3259f2e5492419adf25be6c36f7d0a89"} Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.480005 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.492625 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n556j\" (UniqueName: \"kubernetes.io/projected/2743e133-1c70-48f2-aa22-6c80e628699d-kube-api-access-n556j\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.492721 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.492774 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.503417 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-v6jfd" podStartSLOduration=6.255290813 podStartE2EDuration="14.503399993s" podCreationTimestamp="2026-01-30 11:07:32 +0000 UTC" firstStartedPulling="2026-01-30 11:07:33.916402529 +0000 UTC m=+804.466278595" lastFinishedPulling="2026-01-30 11:07:42.164511709 +0000 UTC m=+812.714387775" observedRunningTime="2026-01-30 11:07:46.498606517 +0000 UTC m=+817.048482603" 
watchObservedRunningTime="2026-01-30 11:07:46.503399993 +0000 UTC m=+817.053276059" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.594396 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n556j\" (UniqueName: \"kubernetes.io/projected/2743e133-1c70-48f2-aa22-6c80e628699d-kube-api-access-n556j\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.594460 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.594502 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.595027 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.595157 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.613177 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n556j\" (UniqueName: \"kubernetes.io/projected/2743e133-1c70-48f2-aa22-6c80e628699d-kube-api-access-n556j\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:46 crc kubenswrapper[4869]: I0130 11:07:46.703234 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:47 crc kubenswrapper[4869]: I0130 11:07:47.083933 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4"] Jan 30 11:07:47 crc kubenswrapper[4869]: I0130 11:07:47.485721 4869 generic.go:334] "Generic (PLEG): container finished" podID="2743e133-1c70-48f2-aa22-6c80e628699d" containerID="ba52e8327b08bc7c12d9764762f0d854412607859bc79ae782395cada0d7f883" exitCode=0 Jan 30 11:07:47 crc kubenswrapper[4869]: I0130 11:07:47.485802 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" event={"ID":"2743e133-1c70-48f2-aa22-6c80e628699d","Type":"ContainerDied","Data":"ba52e8327b08bc7c12d9764762f0d854412607859bc79ae782395cada0d7f883"} Jan 30 11:07:47 crc kubenswrapper[4869]: I0130 11:07:47.485996 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" event={"ID":"2743e133-1c70-48f2-aa22-6c80e628699d","Type":"ContainerStarted","Data":"6a49abc55665ff11a6973e33a74874ab95a4120cde686563c2733761e558fb86"} Jan 30 11:07:48 crc kubenswrapper[4869]: I0130 11:07:48.794933 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:48 crc kubenswrapper[4869]: I0130 11:07:48.840633 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:07:51 crc kubenswrapper[4869]: I0130 11:07:51.511101 4869 generic.go:334] "Generic (PLEG): container finished" podID="2743e133-1c70-48f2-aa22-6c80e628699d" containerID="763d38ccc656f8f066a6c87b993f526c10c5eba1e5fd0ff22d607151f68b6f0d" exitCode=0 Jan 30 11:07:51 crc kubenswrapper[4869]: I0130 11:07:51.511229 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" event={"ID":"2743e133-1c70-48f2-aa22-6c80e628699d","Type":"ContainerDied","Data":"763d38ccc656f8f066a6c87b993f526c10c5eba1e5fd0ff22d607151f68b6f0d"} Jan 30 11:07:51 crc kubenswrapper[4869]: I0130 11:07:51.769825 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:07:51 crc kubenswrapper[4869]: I0130 11:07:51.769911 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:07:52 crc kubenswrapper[4869]: I0130 11:07:52.520792 4869 generic.go:334] "Generic (PLEG): container finished" podID="2743e133-1c70-48f2-aa22-6c80e628699d" containerID="d36bb59ea0d80d107194b1e038eeabe06209f4d16a1949399d2bc2ad991fe4a0" exitCode=0 Jan 30 11:07:52 crc kubenswrapper[4869]: I0130 11:07:52.520910 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" 
event={"ID":"2743e133-1c70-48f2-aa22-6c80e628699d","Type":"ContainerDied","Data":"d36bb59ea0d80d107194b1e038eeabe06209f4d16a1949399d2bc2ad991fe4a0"} Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.763775 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-fwqrp" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.805316 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.879491 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-v8k8c" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.892595 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-util\") pod \"2743e133-1c70-48f2-aa22-6c80e628699d\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.892675 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-bundle\") pod \"2743e133-1c70-48f2-aa22-6c80e628699d\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.892801 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n556j\" (UniqueName: \"kubernetes.io/projected/2743e133-1c70-48f2-aa22-6c80e628699d-kube-api-access-n556j\") pod \"2743e133-1c70-48f2-aa22-6c80e628699d\" (UID: \"2743e133-1c70-48f2-aa22-6c80e628699d\") " Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.893880 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-bundle" (OuterVolumeSpecName: "bundle") pod "2743e133-1c70-48f2-aa22-6c80e628699d" (UID: "2743e133-1c70-48f2-aa22-6c80e628699d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.903910 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-util" (OuterVolumeSpecName: "util") pod "2743e133-1c70-48f2-aa22-6c80e628699d" (UID: "2743e133-1c70-48f2-aa22-6c80e628699d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.905895 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2743e133-1c70-48f2-aa22-6c80e628699d-kube-api-access-n556j" (OuterVolumeSpecName: "kube-api-access-n556j") pod "2743e133-1c70-48f2-aa22-6c80e628699d" (UID: "2743e133-1c70-48f2-aa22-6c80e628699d"). InnerVolumeSpecName "kube-api-access-n556j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.994610 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-util\") on node \"crc\" DevicePath \"\"" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.994648 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2743e133-1c70-48f2-aa22-6c80e628699d-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:07:53 crc kubenswrapper[4869]: I0130 11:07:53.994659 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n556j\" (UniqueName: \"kubernetes.io/projected/2743e133-1c70-48f2-aa22-6c80e628699d-kube-api-access-n556j\") on node \"crc\" DevicePath \"\"" Jan 30 11:07:54 crc kubenswrapper[4869]: I0130 11:07:54.534798 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" event={"ID":"2743e133-1c70-48f2-aa22-6c80e628699d","Type":"ContainerDied","Data":"6a49abc55665ff11a6973e33a74874ab95a4120cde686563c2733761e558fb86"} Jan 30 11:07:54 crc kubenswrapper[4869]: I0130 11:07:54.534847 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a49abc55665ff11a6973e33a74874ab95a4120cde686563c2733761e558fb86" Jan 30 11:07:54 crc kubenswrapper[4869]: I0130 11:07:54.534858 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.037999 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l"] Jan 30 11:07:59 crc kubenswrapper[4869]: E0130 11:07:59.038481 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2743e133-1c70-48f2-aa22-6c80e628699d" containerName="pull" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.038493 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2743e133-1c70-48f2-aa22-6c80e628699d" containerName="pull" Jan 30 11:07:59 crc kubenswrapper[4869]: E0130 11:07:59.038501 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2743e133-1c70-48f2-aa22-6c80e628699d" containerName="extract" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.038509 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2743e133-1c70-48f2-aa22-6c80e628699d" containerName="extract" Jan 30 11:07:59 crc kubenswrapper[4869]: E0130 11:07:59.038520 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2743e133-1c70-48f2-aa22-6c80e628699d" containerName="util" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.038525 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2743e133-1c70-48f2-aa22-6c80e628699d" containerName="util" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.038618 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2743e133-1c70-48f2-aa22-6c80e628699d" containerName="extract" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.039023 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.042979 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.046296 4869 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-ccw2r" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.046653 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.068773 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l"] Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.178512 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm2p5\" (UniqueName: \"kubernetes.io/projected/13643f2f-d64e-408b-97c3-1766b1893a38-kube-api-access-zm2p5\") pod \"cert-manager-operator-controller-manager-66c8bdd694-h4x9l\" (UID: \"13643f2f-d64e-408b-97c3-1766b1893a38\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.178580 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/13643f2f-d64e-408b-97c3-1766b1893a38-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-h4x9l\" (UID: \"13643f2f-d64e-408b-97c3-1766b1893a38\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.280346 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm2p5\" (UniqueName: \"kubernetes.io/projected/13643f2f-d64e-408b-97c3-1766b1893a38-kube-api-access-zm2p5\") pod \"cert-manager-operator-controller-manager-66c8bdd694-h4x9l\" (UID: \"13643f2f-d64e-408b-97c3-1766b1893a38\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.280457 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/13643f2f-d64e-408b-97c3-1766b1893a38-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-h4x9l\" (UID: \"13643f2f-d64e-408b-97c3-1766b1893a38\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.281060 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/13643f2f-d64e-408b-97c3-1766b1893a38-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-h4x9l\" (UID: \"13643f2f-d64e-408b-97c3-1766b1893a38\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.301732 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm2p5\" (UniqueName: \"kubernetes.io/projected/13643f2f-d64e-408b-97c3-1766b1893a38-kube-api-access-zm2p5\") pod \"cert-manager-operator-controller-manager-66c8bdd694-h4x9l\" (UID: \"13643f2f-d64e-408b-97c3-1766b1893a38\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.358287 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" Jan 30 11:07:59 crc kubenswrapper[4869]: I0130 11:07:59.655173 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l"] Jan 30 11:07:59 crc kubenswrapper[4869]: W0130 11:07:59.662032 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13643f2f_d64e_408b_97c3_1766b1893a38.slice/crio-69bf1a84ba3b1f935365d88107931fa5228d5372344563869e0bc86e6118c7ee WatchSource:0}: Error finding container 69bf1a84ba3b1f935365d88107931fa5228d5372344563869e0bc86e6118c7ee: Status 404 returned error can't find the container with id 69bf1a84ba3b1f935365d88107931fa5228d5372344563869e0bc86e6118c7ee Jan 30 11:08:00 crc kubenswrapper[4869]: I0130 11:08:00.568185 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" event={"ID":"13643f2f-d64e-408b-97c3-1766b1893a38","Type":"ContainerStarted","Data":"69bf1a84ba3b1f935365d88107931fa5228d5372344563869e0bc86e6118c7ee"} Jan 30 11:08:03 crc kubenswrapper[4869]: I0130 11:08:03.586732 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" event={"ID":"13643f2f-d64e-408b-97c3-1766b1893a38","Type":"ContainerStarted","Data":"0d916dbb07cf1b30f07d2c45d5b84b11d5afc997d8ba5bfb48a958228a79dbda"} Jan 30 11:08:03 crc kubenswrapper[4869]: I0130 11:08:03.836414 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-v6jfd" Jan 30 11:08:03 crc kubenswrapper[4869]: I0130 11:08:03.864237 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-h4x9l" podStartSLOduration=1.728333611 podStartE2EDuration="4.864218586s" podCreationTimestamp="2026-01-30 11:07:59 +0000 UTC" firstStartedPulling="2026-01-30 11:07:59.665119745 +0000 UTC m=+830.214995811" lastFinishedPulling="2026-01-30 11:08:02.80100472 +0000 UTC m=+833.350880786" observedRunningTime="2026-01-30 11:08:03.606559386 +0000 UTC m=+834.156435452" watchObservedRunningTime="2026-01-30 11:08:03.864218586 +0000 UTC m=+834.414094652" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.600657 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-hncd5"] Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.601907 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.605660 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.605719 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.606070 4869 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-cdt86" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.611423 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-hncd5"] Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.720358 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tqs8\" (UniqueName: \"kubernetes.io/projected/d400b3b0-9a2e-4034-8215-c6007a68665a-kube-api-access-8tqs8\") pod \"cert-manager-cainjector-5545bd876-hncd5\" (UID: \"d400b3b0-9a2e-4034-8215-c6007a68665a\") " pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.720460 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d400b3b0-9a2e-4034-8215-c6007a68665a-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-hncd5\" (UID: \"d400b3b0-9a2e-4034-8215-c6007a68665a\") " pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.822084 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d400b3b0-9a2e-4034-8215-c6007a68665a-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-hncd5\" (UID: \"d400b3b0-9a2e-4034-8215-c6007a68665a\") " pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.822177 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tqs8\" (UniqueName: \"kubernetes.io/projected/d400b3b0-9a2e-4034-8215-c6007a68665a-kube-api-access-8tqs8\") pod \"cert-manager-cainjector-5545bd876-hncd5\" (UID: \"d400b3b0-9a2e-4034-8215-c6007a68665a\") " pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.839668 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tqs8\" (UniqueName: \"kubernetes.io/projected/d400b3b0-9a2e-4034-8215-c6007a68665a-kube-api-access-8tqs8\") pod \"cert-manager-cainjector-5545bd876-hncd5\" (UID: \"d400b3b0-9a2e-4034-8215-c6007a68665a\") " pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.842374 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d400b3b0-9a2e-4034-8215-c6007a68665a-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-hncd5\" (UID: \"d400b3b0-9a2e-4034-8215-c6007a68665a\") " pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:09 crc kubenswrapper[4869]: I0130 11:08:09.929457 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" Jan 30 11:08:10 crc kubenswrapper[4869]: I0130 11:08:10.324416 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-hncd5"] Jan 30 11:08:10 crc kubenswrapper[4869]: W0130 11:08:10.333453 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd400b3b0_9a2e_4034_8215_c6007a68665a.slice/crio-949e338bf5f02c765d3e4a4ad1176db1a72963955cbbd35aabcc1b7b0d866bef WatchSource:0}: Error finding container 949e338bf5f02c765d3e4a4ad1176db1a72963955cbbd35aabcc1b7b0d866bef: Status 404 returned error can't find the container with id 949e338bf5f02c765d3e4a4ad1176db1a72963955cbbd35aabcc1b7b0d866bef Jan 30 11:08:10 crc kubenswrapper[4869]: I0130 11:08:10.628455 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" event={"ID":"d400b3b0-9a2e-4034-8215-c6007a68665a","Type":"ContainerStarted","Data":"949e338bf5f02c765d3e4a4ad1176db1a72963955cbbd35aabcc1b7b0d866bef"} Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.379111 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-m4p9s"] Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.380420 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.385559 4869 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-mwttn" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.386750 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-m4p9s"] Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.459065 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7ef7489-3636-49d3-8cc0-a56c617dc974-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-m4p9s\" (UID: \"e7ef7489-3636-49d3-8cc0-a56c617dc974\") " pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.459126 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rxdb\" (UniqueName: \"kubernetes.io/projected/e7ef7489-3636-49d3-8cc0-a56c617dc974-kube-api-access-9rxdb\") pod \"cert-manager-webhook-6888856db4-m4p9s\" (UID: \"e7ef7489-3636-49d3-8cc0-a56c617dc974\") " pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.561563 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7ef7489-3636-49d3-8cc0-a56c617dc974-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-m4p9s\" (UID: \"e7ef7489-3636-49d3-8cc0-a56c617dc974\") " pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.561644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rxdb\" (UniqueName: \"kubernetes.io/projected/e7ef7489-3636-49d3-8cc0-a56c617dc974-kube-api-access-9rxdb\") pod \"cert-manager-webhook-6888856db4-m4p9s\" (UID: \"e7ef7489-3636-49d3-8cc0-a56c617dc974\") " 
pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.580426 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rxdb\" (UniqueName: \"kubernetes.io/projected/e7ef7489-3636-49d3-8cc0-a56c617dc974-kube-api-access-9rxdb\") pod \"cert-manager-webhook-6888856db4-m4p9s\" (UID: \"e7ef7489-3636-49d3-8cc0-a56c617dc974\") " pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.581497 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7ef7489-3636-49d3-8cc0-a56c617dc974-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-m4p9s\" (UID: \"e7ef7489-3636-49d3-8cc0-a56c617dc974\") " pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:12 crc kubenswrapper[4869]: I0130 11:08:12.698413 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:13 crc kubenswrapper[4869]: I0130 11:08:13.218870 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-m4p9s"] Jan 30 11:08:13 crc kubenswrapper[4869]: I0130 11:08:13.645219 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" event={"ID":"e7ef7489-3636-49d3-8cc0-a56c617dc974","Type":"ContainerStarted","Data":"18d906f573245d4eab4023742c066bfddd475a2848d7b6444786f30e66367af4"} Jan 30 11:08:15 crc kubenswrapper[4869]: I0130 11:08:15.658750 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" event={"ID":"e7ef7489-3636-49d3-8cc0-a56c617dc974","Type":"ContainerStarted","Data":"5c2e63145b535da7f791efe5dbd248dadb30e68fcec990aa8fd9940110970645"} Jan 30 11:08:15 crc kubenswrapper[4869]: I0130 11:08:15.659071 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:15 crc kubenswrapper[4869]: I0130 11:08:15.660490 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" event={"ID":"d400b3b0-9a2e-4034-8215-c6007a68665a","Type":"ContainerStarted","Data":"b320e61081015beebe5386cbdbbf6fdb08291ce1fb42c20dee27dad20e067a78"} Jan 30 11:08:15 crc kubenswrapper[4869]: I0130 11:08:15.674652 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" podStartSLOduration=1.539213597 podStartE2EDuration="3.674633716s" podCreationTimestamp="2026-01-30 11:08:12 +0000 UTC" firstStartedPulling="2026-01-30 11:08:13.228669803 +0000 UTC m=+843.778545869" lastFinishedPulling="2026-01-30 11:08:15.364089922 +0000 UTC m=+845.913965988" observedRunningTime="2026-01-30 11:08:15.673011259 +0000 UTC m=+846.222887345" watchObservedRunningTime="2026-01-30 11:08:15.674633716 +0000 UTC m=+846.224509782" Jan 30 11:08:15 crc kubenswrapper[4869]: I0130 11:08:15.688222 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-5545bd876-hncd5" podStartSLOduration=1.65845494 podStartE2EDuration="6.688204861s" podCreationTimestamp="2026-01-30 11:08:09 +0000 UTC" firstStartedPulling="2026-01-30 11:08:10.335772932 +0000 UTC m=+840.885648998" lastFinishedPulling="2026-01-30 11:08:15.365522853 +0000 UTC m=+845.915398919" 
observedRunningTime="2026-01-30 11:08:15.686497313 +0000 UTC m=+846.236373379" watchObservedRunningTime="2026-01-30 11:08:15.688204861 +0000 UTC m=+846.238080927" Jan 30 11:08:21 crc kubenswrapper[4869]: I0130 11:08:21.769133 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:08:21 crc kubenswrapper[4869]: I0130 11:08:21.769744 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:08:21 crc kubenswrapper[4869]: I0130 11:08:21.769941 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:08:21 crc kubenswrapper[4869]: I0130 11:08:21.770572 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c5808a41780bbab079aa25dfbf774b2278de2a0be2251b6799239067a10cf14f"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:08:21 crc kubenswrapper[4869]: I0130 11:08:21.770632 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://c5808a41780bbab079aa25dfbf774b2278de2a0be2251b6799239067a10cf14f" gracePeriod=600 Jan 30 11:08:22 crc kubenswrapper[4869]: I0130 11:08:22.698541 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="c5808a41780bbab079aa25dfbf774b2278de2a0be2251b6799239067a10cf14f" exitCode=0 Jan 30 11:08:22 crc kubenswrapper[4869]: I0130 11:08:22.698611 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"c5808a41780bbab079aa25dfbf774b2278de2a0be2251b6799239067a10cf14f"} Jan 30 11:08:22 crc kubenswrapper[4869]: I0130 11:08:22.698933 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"4ad08bd30e3f392ec90da3d9d390933399838d7e5f9e5bf41ffe126d804a3058"} Jan 30 11:08:22 crc kubenswrapper[4869]: I0130 11:08:22.698951 4869 scope.go:117] "RemoveContainer" containerID="b42a25ce06c4520062876a9a0c419b39f2083601578538371202936d74ce51ae" Jan 30 11:08:22 crc kubenswrapper[4869]: I0130 11:08:22.701471 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-6888856db4-m4p9s" Jan 30 11:08:25 crc kubenswrapper[4869]: I0130 11:08:25.914429 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-545d4d4674-wpxsv"] Jan 30 11:08:25 crc kubenswrapper[4869]: I0130 11:08:25.916568 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:25 crc kubenswrapper[4869]: I0130 11:08:25.918318 4869 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-qcx9b" Jan 30 11:08:25 crc kubenswrapper[4869]: I0130 11:08:25.932235 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-wpxsv"] Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.073103 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a3620bab-0584-4515-aeda-9aac66fc26d1-bound-sa-token\") pod \"cert-manager-545d4d4674-wpxsv\" (UID: \"a3620bab-0584-4515-aeda-9aac66fc26d1\") " pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.073181 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlg94\" (UniqueName: \"kubernetes.io/projected/a3620bab-0584-4515-aeda-9aac66fc26d1-kube-api-access-tlg94\") pod \"cert-manager-545d4d4674-wpxsv\" (UID: \"a3620bab-0584-4515-aeda-9aac66fc26d1\") " pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.173979 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlg94\" (UniqueName: \"kubernetes.io/projected/a3620bab-0584-4515-aeda-9aac66fc26d1-kube-api-access-tlg94\") pod \"cert-manager-545d4d4674-wpxsv\" (UID: \"a3620bab-0584-4515-aeda-9aac66fc26d1\") " pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.174118 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a3620bab-0584-4515-aeda-9aac66fc26d1-bound-sa-token\") pod \"cert-manager-545d4d4674-wpxsv\" (UID: \"a3620bab-0584-4515-aeda-9aac66fc26d1\") " pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.192072 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a3620bab-0584-4515-aeda-9aac66fc26d1-bound-sa-token\") pod \"cert-manager-545d4d4674-wpxsv\" (UID: \"a3620bab-0584-4515-aeda-9aac66fc26d1\") " pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.192286 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlg94\" (UniqueName: \"kubernetes.io/projected/a3620bab-0584-4515-aeda-9aac66fc26d1-kube-api-access-tlg94\") pod \"cert-manager-545d4d4674-wpxsv\" (UID: \"a3620bab-0584-4515-aeda-9aac66fc26d1\") " pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.243276 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-wpxsv" Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.449252 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-wpxsv"] Jan 30 11:08:26 crc kubenswrapper[4869]: W0130 11:08:26.453403 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3620bab_0584_4515_aeda_9aac66fc26d1.slice/crio-a93b85011a561723906e912a20ad3b709fbdb6a91180d85474d849badf10e43b WatchSource:0}: Error finding container a93b85011a561723906e912a20ad3b709fbdb6a91180d85474d849badf10e43b: Status 404 returned error can't find the container with id a93b85011a561723906e912a20ad3b709fbdb6a91180d85474d849badf10e43b Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.726453 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-wpxsv" event={"ID":"a3620bab-0584-4515-aeda-9aac66fc26d1","Type":"ContainerStarted","Data":"c40793a1e377831c3387d61b54927dab4559a46ce9585d2bdf2414b6825df678"} Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.726785 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-wpxsv" event={"ID":"a3620bab-0584-4515-aeda-9aac66fc26d1","Type":"ContainerStarted","Data":"a93b85011a561723906e912a20ad3b709fbdb6a91180d85474d849badf10e43b"} Jan 30 11:08:26 crc kubenswrapper[4869]: I0130 11:08:26.751238 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-545d4d4674-wpxsv" podStartSLOduration=1.751205776 podStartE2EDuration="1.751205776s" podCreationTimestamp="2026-01-30 11:08:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:08:26.743495547 +0000 UTC m=+857.293371633" watchObservedRunningTime="2026-01-30 11:08:26.751205776 +0000 UTC m=+857.301081892" Jan 30 11:08:35 crc kubenswrapper[4869]: I0130 11:08:35.888990 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jchx8"] Jan 30 11:08:35 crc kubenswrapper[4869]: I0130 11:08:35.890431 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jchx8" Jan 30 11:08:35 crc kubenswrapper[4869]: I0130 11:08:35.896212 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 30 11:08:35 crc kubenswrapper[4869]: I0130 11:08:35.896394 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-nzkvn" Jan 30 11:08:35 crc kubenswrapper[4869]: I0130 11:08:35.896507 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 30 11:08:35 crc kubenswrapper[4869]: I0130 11:08:35.908026 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jchx8"] Jan 30 11:08:35 crc kubenswrapper[4869]: I0130 11:08:35.987411 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qtgk\" (UniqueName: \"kubernetes.io/projected/ab0106d6-2bdb-4dce-8146-2cfebf822626-kube-api-access-6qtgk\") pod \"openstack-operator-index-jchx8\" (UID: \"ab0106d6-2bdb-4dce-8146-2cfebf822626\") " pod="openstack-operators/openstack-operator-index-jchx8" Jan 30 11:08:36 crc kubenswrapper[4869]: I0130 11:08:36.088646 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qtgk\" (UniqueName: \"kubernetes.io/projected/ab0106d6-2bdb-4dce-8146-2cfebf822626-kube-api-access-6qtgk\") pod \"openstack-operator-index-jchx8\" (UID: \"ab0106d6-2bdb-4dce-8146-2cfebf822626\") " pod="openstack-operators/openstack-operator-index-jchx8" Jan 30 11:08:36 crc kubenswrapper[4869]: I0130 11:08:36.106893 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qtgk\" (UniqueName: \"kubernetes.io/projected/ab0106d6-2bdb-4dce-8146-2cfebf822626-kube-api-access-6qtgk\") pod \"openstack-operator-index-jchx8\" (UID: \"ab0106d6-2bdb-4dce-8146-2cfebf822626\") " pod="openstack-operators/openstack-operator-index-jchx8" Jan 30 11:08:36 crc kubenswrapper[4869]: I0130 11:08:36.207672 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jchx8" Jan 30 11:08:36 crc kubenswrapper[4869]: I0130 11:08:36.612489 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jchx8"] Jan 30 11:08:36 crc kubenswrapper[4869]: W0130 11:08:36.615762 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab0106d6_2bdb_4dce_8146_2cfebf822626.slice/crio-9ea6a095b0662d9d880333c127ea0e7e6cd3d24bee860da08dc548efca490313 WatchSource:0}: Error finding container 9ea6a095b0662d9d880333c127ea0e7e6cd3d24bee860da08dc548efca490313: Status 404 returned error can't find the container with id 9ea6a095b0662d9d880333c127ea0e7e6cd3d24bee860da08dc548efca490313 Jan 30 11:08:36 crc kubenswrapper[4869]: I0130 11:08:36.782050 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jchx8" event={"ID":"ab0106d6-2bdb-4dce-8146-2cfebf822626","Type":"ContainerStarted","Data":"9ea6a095b0662d9d880333c127ea0e7e6cd3d24bee860da08dc548efca490313"} Jan 30 11:08:38 crc kubenswrapper[4869]: I0130 11:08:38.469078 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jchx8"] Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.075985 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-zz5jk"] Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.077083 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.085341 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zz5jk"] Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.235612 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zhz8\" (UniqueName: \"kubernetes.io/projected/3d6e0112-9414-4306-965b-721b67035025-kube-api-access-4zhz8\") pod \"openstack-operator-index-zz5jk\" (UID: \"3d6e0112-9414-4306-965b-721b67035025\") " pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.337750 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zhz8\" (UniqueName: \"kubernetes.io/projected/3d6e0112-9414-4306-965b-721b67035025-kube-api-access-4zhz8\") pod \"openstack-operator-index-zz5jk\" (UID: \"3d6e0112-9414-4306-965b-721b67035025\") " pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.355767 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zhz8\" (UniqueName: \"kubernetes.io/projected/3d6e0112-9414-4306-965b-721b67035025-kube-api-access-4zhz8\") pod \"openstack-operator-index-zz5jk\" (UID: \"3d6e0112-9414-4306-965b-721b67035025\") " pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.403655 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.804427 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jchx8" event={"ID":"ab0106d6-2bdb-4dce-8146-2cfebf822626","Type":"ContainerStarted","Data":"66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a"} Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.804577 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jchx8" podUID="ab0106d6-2bdb-4dce-8146-2cfebf822626" containerName="registry-server" containerID="cri-o://66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a" gracePeriod=2 Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.821091 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jchx8" podStartSLOduration=2.548938733 podStartE2EDuration="4.821071458s" podCreationTimestamp="2026-01-30 11:08:35 +0000 UTC" firstStartedPulling="2026-01-30 11:08:36.618127218 +0000 UTC m=+867.168003284" lastFinishedPulling="2026-01-30 11:08:38.890259943 +0000 UTC m=+869.440136009" observedRunningTime="2026-01-30 11:08:39.817739683 +0000 UTC m=+870.367615769" watchObservedRunningTime="2026-01-30 11:08:39.821071458 +0000 UTC m=+870.370947524" Jan 30 11:08:39 crc kubenswrapper[4869]: I0130 11:08:39.881565 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zz5jk"] Jan 30 11:08:39 crc kubenswrapper[4869]: W0130 11:08:39.899986 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d6e0112_9414_4306_965b_721b67035025.slice/crio-ac576e47f758f9c1a80b8fc8704eab457640557cd22c75360e3b3cf2308fdd79 WatchSource:0}: Error finding container ac576e47f758f9c1a80b8fc8704eab457640557cd22c75360e3b3cf2308fdd79: Status 404 returned error can't find the container with id ac576e47f758f9c1a80b8fc8704eab457640557cd22c75360e3b3cf2308fdd79 Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.155060 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jchx8" Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.249873 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qtgk\" (UniqueName: \"kubernetes.io/projected/ab0106d6-2bdb-4dce-8146-2cfebf822626-kube-api-access-6qtgk\") pod \"ab0106d6-2bdb-4dce-8146-2cfebf822626\" (UID: \"ab0106d6-2bdb-4dce-8146-2cfebf822626\") " Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.254599 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab0106d6-2bdb-4dce-8146-2cfebf822626-kube-api-access-6qtgk" (OuterVolumeSpecName: "kube-api-access-6qtgk") pod "ab0106d6-2bdb-4dce-8146-2cfebf822626" (UID: "ab0106d6-2bdb-4dce-8146-2cfebf822626"). InnerVolumeSpecName "kube-api-access-6qtgk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.351536 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qtgk\" (UniqueName: \"kubernetes.io/projected/ab0106d6-2bdb-4dce-8146-2cfebf822626-kube-api-access-6qtgk\") on node \"crc\" DevicePath \"\"" Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.809792 4869 generic.go:334] "Generic (PLEG): container finished" podID="ab0106d6-2bdb-4dce-8146-2cfebf822626" containerID="66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a" exitCode=0 Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.809833 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jchx8" event={"ID":"ab0106d6-2bdb-4dce-8146-2cfebf822626","Type":"ContainerDied","Data":"66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a"} Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.809872 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jchx8" event={"ID":"ab0106d6-2bdb-4dce-8146-2cfebf822626","Type":"ContainerDied","Data":"9ea6a095b0662d9d880333c127ea0e7e6cd3d24bee860da08dc548efca490313"} Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.809871 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jchx8" Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.809891 4869 scope.go:117] "RemoveContainer" containerID="66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a" Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.812784 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zz5jk" event={"ID":"3d6e0112-9414-4306-965b-721b67035025","Type":"ContainerStarted","Data":"1570f23ccf679b0b647a4c316c02b22becef2b502b5a6b98119334101cdd0ade"} Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.812819 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zz5jk" event={"ID":"3d6e0112-9414-4306-965b-721b67035025","Type":"ContainerStarted","Data":"ac576e47f758f9c1a80b8fc8704eab457640557cd22c75360e3b3cf2308fdd79"} Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.826702 4869 scope.go:117] "RemoveContainer" containerID="66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a" Jan 30 11:08:40 crc kubenswrapper[4869]: E0130 11:08:40.827126 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a\": container with ID starting with 66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a not found: ID does not exist" containerID="66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a" Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.827161 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a"} err="failed to get container status \"66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a\": rpc error: code = NotFound desc = could not find container \"66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a\": container with ID starting with 66cfae34beec6b2971969604bdaeac0d4911e87f46e0c4c37c305d6ee7f7346a not found: ID does not exist" Jan 30 11:08:40 crc kubenswrapper[4869]: 
I0130 11:08:40.830454 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-zz5jk" podStartSLOduration=1.7810969829999999 podStartE2EDuration="1.830434385s" podCreationTimestamp="2026-01-30 11:08:39 +0000 UTC" firstStartedPulling="2026-01-30 11:08:39.905851526 +0000 UTC m=+870.455727592" lastFinishedPulling="2026-01-30 11:08:39.955188928 +0000 UTC m=+870.505064994" observedRunningTime="2026-01-30 11:08:40.829950582 +0000 UTC m=+871.379826658" watchObservedRunningTime="2026-01-30 11:08:40.830434385 +0000 UTC m=+871.380310451" Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.843519 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jchx8"] Jan 30 11:08:40 crc kubenswrapper[4869]: I0130 11:08:40.847364 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jchx8"] Jan 30 11:08:42 crc kubenswrapper[4869]: I0130 11:08:42.139860 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab0106d6-2bdb-4dce-8146-2cfebf822626" path="/var/lib/kubelet/pods/ab0106d6-2bdb-4dce-8146-2cfebf822626/volumes" Jan 30 11:08:49 crc kubenswrapper[4869]: I0130 11:08:49.404739 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:49 crc kubenswrapper[4869]: I0130 11:08:49.405310 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:49 crc kubenswrapper[4869]: I0130 11:08:49.433233 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:49 crc kubenswrapper[4869]: I0130 11:08:49.886298 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-zz5jk" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.107408 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb"] Jan 30 11:08:51 crc kubenswrapper[4869]: E0130 11:08:51.108847 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0106d6-2bdb-4dce-8146-2cfebf822626" containerName="registry-server" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.108959 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0106d6-2bdb-4dce-8146-2cfebf822626" containerName="registry-server" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.109168 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab0106d6-2bdb-4dce-8146-2cfebf822626" containerName="registry-server" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.110304 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.114837 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-lj7md" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.116908 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb"] Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.284591 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrxfp\" (UniqueName: \"kubernetes.io/projected/56568718-0940-4bb5-be9e-842cef4a2cdb-kube-api-access-vrxfp\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.284703 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-util\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.284768 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-bundle\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.385987 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-util\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.386051 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-bundle\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.386089 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrxfp\" (UniqueName: \"kubernetes.io/projected/56568718-0940-4bb5-be9e-842cef4a2cdb-kube-api-access-vrxfp\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.386547 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-util\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.386674 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-bundle\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.412386 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrxfp\" (UniqueName: \"kubernetes.io/projected/56568718-0940-4bb5-be9e-842cef4a2cdb-kube-api-access-vrxfp\") pod \"b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.430939 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.831122 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb"] Jan 30 11:08:51 crc kubenswrapper[4869]: I0130 11:08:51.878809 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" event={"ID":"56568718-0940-4bb5-be9e-842cef4a2cdb","Type":"ContainerStarted","Data":"118b145bcee6cb6707c04c0ebedfa3971edd9f1b789b1f06ead3798af3d2f7a7"} Jan 30 11:08:52 crc kubenswrapper[4869]: I0130 11:08:52.891480 4869 generic.go:334] "Generic (PLEG): container finished" podID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerID="58ade8943fd04f5b185df3a50150549a124657bc96b4fc5f746e6afc91d8a5b5" exitCode=0 Jan 30 11:08:52 crc kubenswrapper[4869]: I0130 11:08:52.891845 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" event={"ID":"56568718-0940-4bb5-be9e-842cef4a2cdb","Type":"ContainerDied","Data":"58ade8943fd04f5b185df3a50150549a124657bc96b4fc5f746e6afc91d8a5b5"} Jan 30 11:08:53 crc kubenswrapper[4869]: I0130 11:08:53.899898 4869 generic.go:334] "Generic (PLEG): container finished" podID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerID="e2df68adf4dc00069c6f01fa19882b81f5f7797113811980946473b57a5e417a" exitCode=0 Jan 30 11:08:53 crc kubenswrapper[4869]: I0130 11:08:53.899946 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" event={"ID":"56568718-0940-4bb5-be9e-842cef4a2cdb","Type":"ContainerDied","Data":"e2df68adf4dc00069c6f01fa19882b81f5f7797113811980946473b57a5e417a"} Jan 30 11:08:54 crc kubenswrapper[4869]: I0130 11:08:54.919163 4869 generic.go:334] "Generic (PLEG): container finished" podID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerID="49977cc9032397182d59c10f15a82203c2bfc4d081a0c898dda1563d97583d45" exitCode=0 Jan 30 11:08:54 crc kubenswrapper[4869]: I0130 11:08:54.919224 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" event={"ID":"56568718-0940-4bb5-be9e-842cef4a2cdb","Type":"ContainerDied","Data":"49977cc9032397182d59c10f15a82203c2bfc4d081a0c898dda1563d97583d45"} Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.206139 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.383781 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-bundle\") pod \"56568718-0940-4bb5-be9e-842cef4a2cdb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.383864 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrxfp\" (UniqueName: \"kubernetes.io/projected/56568718-0940-4bb5-be9e-842cef4a2cdb-kube-api-access-vrxfp\") pod \"56568718-0940-4bb5-be9e-842cef4a2cdb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.383902 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-util\") pod \"56568718-0940-4bb5-be9e-842cef4a2cdb\" (UID: \"56568718-0940-4bb5-be9e-842cef4a2cdb\") " Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.384403 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-bundle" (OuterVolumeSpecName: "bundle") pod "56568718-0940-4bb5-be9e-842cef4a2cdb" (UID: "56568718-0940-4bb5-be9e-842cef4a2cdb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.388428 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56568718-0940-4bb5-be9e-842cef4a2cdb-kube-api-access-vrxfp" (OuterVolumeSpecName: "kube-api-access-vrxfp") pod "56568718-0940-4bb5-be9e-842cef4a2cdb" (UID: "56568718-0940-4bb5-be9e-842cef4a2cdb"). InnerVolumeSpecName "kube-api-access-vrxfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.398433 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-util" (OuterVolumeSpecName: "util") pod "56568718-0940-4bb5-be9e-842cef4a2cdb" (UID: "56568718-0940-4bb5-be9e-842cef4a2cdb"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.486917 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.486954 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrxfp\" (UniqueName: \"kubernetes.io/projected/56568718-0940-4bb5-be9e-842cef4a2cdb-kube-api-access-vrxfp\") on node \"crc\" DevicePath \"\"" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.486969 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56568718-0940-4bb5-be9e-842cef4a2cdb-util\") on node \"crc\" DevicePath \"\"" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.935348 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" event={"ID":"56568718-0940-4bb5-be9e-842cef4a2cdb","Type":"ContainerDied","Data":"118b145bcee6cb6707c04c0ebedfa3971edd9f1b789b1f06ead3798af3d2f7a7"} Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.935759 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="118b145bcee6cb6707c04c0ebedfa3971edd9f1b789b1f06ead3798af3d2f7a7" Jan 30 11:08:56 crc kubenswrapper[4869]: I0130 11:08:56.935433 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.289465 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2"] Jan 30 11:09:04 crc kubenswrapper[4869]: E0130 11:09:04.290072 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerName="pull" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.290089 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerName="pull" Jan 30 11:09:04 crc kubenswrapper[4869]: E0130 11:09:04.290114 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerName="extract" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.290121 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerName="extract" Jan 30 11:09:04 crc kubenswrapper[4869]: E0130 11:09:04.290133 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerName="util" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.290142 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerName="util" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.290280 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="56568718-0940-4bb5-be9e-842cef4a2cdb" containerName="extract" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.290788 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.296409 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-kzmwv" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.325830 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2"] Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.384518 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfj6z\" (UniqueName: \"kubernetes.io/projected/d1cf852a-16c5-4391-a885-c91bf7cb80e3-kube-api-access-zfj6z\") pod \"openstack-operator-controller-init-595d55cff7-wpkm2\" (UID: \"d1cf852a-16c5-4391-a885-c91bf7cb80e3\") " pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.485672 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfj6z\" (UniqueName: \"kubernetes.io/projected/d1cf852a-16c5-4391-a885-c91bf7cb80e3-kube-api-access-zfj6z\") pod \"openstack-operator-controller-init-595d55cff7-wpkm2\" (UID: \"d1cf852a-16c5-4391-a885-c91bf7cb80e3\") " pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.503687 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfj6z\" (UniqueName: \"kubernetes.io/projected/d1cf852a-16c5-4391-a885-c91bf7cb80e3-kube-api-access-zfj6z\") pod \"openstack-operator-controller-init-595d55cff7-wpkm2\" (UID: \"d1cf852a-16c5-4391-a885-c91bf7cb80e3\") " pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" Jan 30 11:09:04 crc kubenswrapper[4869]: I0130 11:09:04.610924 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" Jan 30 11:09:05 crc kubenswrapper[4869]: I0130 11:09:05.154539 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2"] Jan 30 11:09:05 crc kubenswrapper[4869]: I0130 11:09:05.994835 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" event={"ID":"d1cf852a-16c5-4391-a885-c91bf7cb80e3","Type":"ContainerStarted","Data":"af7add5c350280bbae633532afce9aa3fc1ed315c949bf5a1ce19d7020bc137d"} Jan 30 11:09:10 crc kubenswrapper[4869]: I0130 11:09:10.017832 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" event={"ID":"d1cf852a-16c5-4391-a885-c91bf7cb80e3","Type":"ContainerStarted","Data":"cc0aad48b2dee8de2c32b288674caf9db8efb2a4c105876a6361caa1a76080a7"} Jan 30 11:09:10 crc kubenswrapper[4869]: I0130 11:09:10.018323 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" Jan 30 11:09:10 crc kubenswrapper[4869]: I0130 11:09:10.050968 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" podStartSLOduration=1.751946744 podStartE2EDuration="6.050952334s" podCreationTimestamp="2026-01-30 11:09:04 +0000 UTC" firstStartedPulling="2026-01-30 11:09:05.159578534 +0000 UTC m=+895.709454600" lastFinishedPulling="2026-01-30 11:09:09.458584124 +0000 UTC m=+900.008460190" observedRunningTime="2026-01-30 11:09:10.047144836 +0000 UTC m=+900.597020912" watchObservedRunningTime="2026-01-30 11:09:10.050952334 +0000 UTC m=+900.600828410" Jan 30 11:09:14 crc kubenswrapper[4869]: I0130 11:09:14.614375 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-595d55cff7-wpkm2" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.369427 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d56l5"] Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.370963 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.393292 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d56l5"] Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.471399 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-utilities\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.471470 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-catalog-content\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.471616 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-449sd\" (UniqueName: \"kubernetes.io/projected/48e98cbc-623a-404b-a3dd-4f46720da438-kube-api-access-449sd\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.572834 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-utilities\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.572950 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-catalog-content\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.572994 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-449sd\" (UniqueName: \"kubernetes.io/projected/48e98cbc-623a-404b-a3dd-4f46720da438-kube-api-access-449sd\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.573813 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-utilities\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.574073 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-catalog-content\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.593256 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-449sd\" (UniqueName: \"kubernetes.io/projected/48e98cbc-623a-404b-a3dd-4f46720da438-kube-api-access-449sd\") pod \"redhat-marketplace-d56l5\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:16 crc kubenswrapper[4869]: I0130 11:09:16.685218 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:17 crc kubenswrapper[4869]: I0130 11:09:17.133699 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d56l5"] Jan 30 11:09:18 crc kubenswrapper[4869]: I0130 11:09:18.070123 4869 generic.go:334] "Generic (PLEG): container finished" podID="48e98cbc-623a-404b-a3dd-4f46720da438" containerID="b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45" exitCode=0 Jan 30 11:09:18 crc kubenswrapper[4869]: I0130 11:09:18.070391 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d56l5" event={"ID":"48e98cbc-623a-404b-a3dd-4f46720da438","Type":"ContainerDied","Data":"b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45"} Jan 30 11:09:18 crc kubenswrapper[4869]: I0130 11:09:18.070419 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d56l5" event={"ID":"48e98cbc-623a-404b-a3dd-4f46720da438","Type":"ContainerStarted","Data":"d8b09b467c5d83cd824ce4419a2d9819509b3e6f7042cf74e90ed57580463d2f"} Jan 30 11:09:19 crc kubenswrapper[4869]: I0130 11:09:19.087614 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d56l5" event={"ID":"48e98cbc-623a-404b-a3dd-4f46720da438","Type":"ContainerStarted","Data":"69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743"} Jan 30 11:09:20 crc kubenswrapper[4869]: I0130 11:09:20.120401 4869 generic.go:334] "Generic (PLEG): container finished" podID="48e98cbc-623a-404b-a3dd-4f46720da438" containerID="69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743" exitCode=0 Jan 30 11:09:20 crc kubenswrapper[4869]: I0130 11:09:20.120460 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d56l5" event={"ID":"48e98cbc-623a-404b-a3dd-4f46720da438","Type":"ContainerDied","Data":"69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743"} Jan 30 11:09:20 crc kubenswrapper[4869]: I0130 11:09:20.958787 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t2mgt"] Jan 30 11:09:20 crc kubenswrapper[4869]: I0130 11:09:20.960625 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:20 crc kubenswrapper[4869]: I0130 11:09:20.973041 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t2mgt"] Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.035465 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-catalog-content\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.035552 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-utilities\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.035617 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qxsb\" (UniqueName: \"kubernetes.io/projected/53519751-f7c0-4f7c-8a3e-eea53b107ac5-kube-api-access-4qxsb\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.128522 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d56l5" event={"ID":"48e98cbc-623a-404b-a3dd-4f46720da438","Type":"ContainerStarted","Data":"47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da"} Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.137438 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-catalog-content\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.137497 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-utilities\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.137549 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qxsb\" (UniqueName: \"kubernetes.io/projected/53519751-f7c0-4f7c-8a3e-eea53b107ac5-kube-api-access-4qxsb\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.138148 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-catalog-content\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.138195 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-utilities\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.150558 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d56l5" podStartSLOduration=2.673172484 podStartE2EDuration="5.150539539s" podCreationTimestamp="2026-01-30 11:09:16 +0000 UTC" firstStartedPulling="2026-01-30 11:09:18.071748496 +0000 UTC m=+908.621624562" lastFinishedPulling="2026-01-30 11:09:20.549115551 +0000 UTC m=+911.098991617" observedRunningTime="2026-01-30 11:09:21.147689038 +0000 UTC m=+911.697565124" watchObservedRunningTime="2026-01-30 11:09:21.150539539 +0000 UTC m=+911.700415605" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.162745 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qxsb\" (UniqueName: \"kubernetes.io/projected/53519751-f7c0-4f7c-8a3e-eea53b107ac5-kube-api-access-4qxsb\") pod \"community-operators-t2mgt\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") " pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.276627 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:21 crc kubenswrapper[4869]: I0130 11:09:21.789236 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t2mgt"] Jan 30 11:09:22 crc kubenswrapper[4869]: I0130 11:09:22.141807 4869 generic.go:334] "Generic (PLEG): container finished" podID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerID="0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d" exitCode=0 Jan 30 11:09:22 crc kubenswrapper[4869]: I0130 11:09:22.143030 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t2mgt" event={"ID":"53519751-f7c0-4f7c-8a3e-eea53b107ac5","Type":"ContainerDied","Data":"0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d"} Jan 30 11:09:22 crc kubenswrapper[4869]: I0130 11:09:22.143053 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t2mgt" event={"ID":"53519751-f7c0-4f7c-8a3e-eea53b107ac5","Type":"ContainerStarted","Data":"7d3fe0c3de9a568070c622317ee96bc5604b36daaae71e58fcb0ad01f7935fad"} Jan 30 11:09:24 crc kubenswrapper[4869]: I0130 11:09:24.153178 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t2mgt" event={"ID":"53519751-f7c0-4f7c-8a3e-eea53b107ac5","Type":"ContainerStarted","Data":"55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072"} Jan 30 11:09:25 crc kubenswrapper[4869]: I0130 11:09:25.160278 4869 generic.go:334] "Generic (PLEG): container finished" podID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerID="55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072" exitCode=0 Jan 30 11:09:25 crc kubenswrapper[4869]: I0130 11:09:25.160562 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t2mgt" event={"ID":"53519751-f7c0-4f7c-8a3e-eea53b107ac5","Type":"ContainerDied","Data":"55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072"} Jan 30 11:09:26 crc kubenswrapper[4869]: I0130 11:09:26.167225 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-t2mgt" event={"ID":"53519751-f7c0-4f7c-8a3e-eea53b107ac5","Type":"ContainerStarted","Data":"792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847"} Jan 30 11:09:26 crc kubenswrapper[4869]: I0130 11:09:26.192181 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t2mgt" podStartSLOduration=2.7200373190000002 podStartE2EDuration="6.192162709s" podCreationTimestamp="2026-01-30 11:09:20 +0000 UTC" firstStartedPulling="2026-01-30 11:09:22.143782868 +0000 UTC m=+912.693658934" lastFinishedPulling="2026-01-30 11:09:25.615908258 +0000 UTC m=+916.165784324" observedRunningTime="2026-01-30 11:09:26.186130378 +0000 UTC m=+916.736006454" watchObservedRunningTime="2026-01-30 11:09:26.192162709 +0000 UTC m=+916.742038775" Jan 30 11:09:26 crc kubenswrapper[4869]: I0130 11:09:26.685871 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:26 crc kubenswrapper[4869]: I0130 11:09:26.686198 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:26 crc kubenswrapper[4869]: I0130 11:09:26.754897 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:27 crc kubenswrapper[4869]: I0130 11:09:27.219471 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:28 crc kubenswrapper[4869]: I0130 11:09:28.752697 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d56l5"] Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.187765 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d56l5" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="registry-server" containerID="cri-o://47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da" gracePeriod=2 Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.583726 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.660027 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-catalog-content\") pod \"48e98cbc-623a-404b-a3dd-4f46720da438\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.660121 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-utilities\") pod \"48e98cbc-623a-404b-a3dd-4f46720da438\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.660143 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-449sd\" (UniqueName: \"kubernetes.io/projected/48e98cbc-623a-404b-a3dd-4f46720da438-kube-api-access-449sd\") pod \"48e98cbc-623a-404b-a3dd-4f46720da438\" (UID: \"48e98cbc-623a-404b-a3dd-4f46720da438\") " Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.661125 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-utilities" (OuterVolumeSpecName: "utilities") pod "48e98cbc-623a-404b-a3dd-4f46720da438" (UID: "48e98cbc-623a-404b-a3dd-4f46720da438"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.665782 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48e98cbc-623a-404b-a3dd-4f46720da438-kube-api-access-449sd" (OuterVolumeSpecName: "kube-api-access-449sd") pod "48e98cbc-623a-404b-a3dd-4f46720da438" (UID: "48e98cbc-623a-404b-a3dd-4f46720da438"). InnerVolumeSpecName "kube-api-access-449sd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.687457 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48e98cbc-623a-404b-a3dd-4f46720da438" (UID: "48e98cbc-623a-404b-a3dd-4f46720da438"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.761556 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.761595 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e98cbc-623a-404b-a3dd-4f46720da438-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:09:29 crc kubenswrapper[4869]: I0130 11:09:29.761608 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-449sd\" (UniqueName: \"kubernetes.io/projected/48e98cbc-623a-404b-a3dd-4f46720da438-kube-api-access-449sd\") on node \"crc\" DevicePath \"\"" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.195264 4869 generic.go:334] "Generic (PLEG): container finished" podID="48e98cbc-623a-404b-a3dd-4f46720da438" containerID="47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da" exitCode=0 Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.195303 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d56l5" event={"ID":"48e98cbc-623a-404b-a3dd-4f46720da438","Type":"ContainerDied","Data":"47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da"} Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.195327 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d56l5" event={"ID":"48e98cbc-623a-404b-a3dd-4f46720da438","Type":"ContainerDied","Data":"d8b09b467c5d83cd824ce4419a2d9819509b3e6f7042cf74e90ed57580463d2f"} Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.195342 4869 scope.go:117] "RemoveContainer" containerID="47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.195449 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d56l5" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.228512 4869 scope.go:117] "RemoveContainer" containerID="69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.231280 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d56l5"] Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.240161 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d56l5"] Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.270289 4869 scope.go:117] "RemoveContainer" containerID="b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.305898 4869 scope.go:117] "RemoveContainer" containerID="47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da" Jan 30 11:09:30 crc kubenswrapper[4869]: E0130 11:09:30.311876 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da\": container with ID starting with 47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da not found: ID does not exist" containerID="47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.311930 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da"} err="failed to get container status \"47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da\": rpc error: code = NotFound desc = could not find container \"47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da\": container with ID starting with 47d2c5c37f2c249896122e0402b4dcc569ed96d18e7c5fb00d0d43dcba0af4da not found: ID does not exist" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.311961 4869 scope.go:117] "RemoveContainer" containerID="69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743" Jan 30 11:09:30 crc kubenswrapper[4869]: E0130 11:09:30.315872 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743\": container with ID starting with 69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743 not found: ID does not exist" containerID="69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.315941 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743"} err="failed to get container status \"69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743\": rpc error: code = NotFound desc = could not find container \"69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743\": container with ID starting with 69e60604d4183ddd3fa4a4f90f87fc197922a0b317ead1dce1beb45e5590b743 not found: ID does not exist" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.316004 4869 scope.go:117] "RemoveContainer" containerID="b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45" Jan 30 11:09:30 crc kubenswrapper[4869]: E0130 11:09:30.319920 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45\": container with ID starting with b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45 not found: ID does not exist" containerID="b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45" Jan 30 11:09:30 crc kubenswrapper[4869]: I0130 11:09:30.319970 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45"} err="failed to get container status \"b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45\": rpc error: code = NotFound desc = could not find container \"b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45\": container with ID starting with b3314f352c62b597c95eea53f45c04403b6ab7783255d8f3c475b57a48830b45 not found: ID does not exist" Jan 30 11:09:31 crc kubenswrapper[4869]: I0130 11:09:31.278058 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:31 crc kubenswrapper[4869]: I0130 11:09:31.278116 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:31 crc kubenswrapper[4869]: I0130 11:09:31.328156 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.140524 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" path="/var/lib/kubelet/pods/48e98cbc-623a-404b-a3dd-4f46720da438/volumes" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.260109 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t2mgt" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.875506 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw"] Jan 30 11:09:32 crc kubenswrapper[4869]: E0130 11:09:32.875826 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="extract-content" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.875845 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="extract-content" Jan 30 11:09:32 crc kubenswrapper[4869]: E0130 11:09:32.875869 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="registry-server" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.875875 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="registry-server" Jan 30 11:09:32 crc kubenswrapper[4869]: E0130 11:09:32.875884 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="extract-utilities" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.875891 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="extract-utilities" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.875995 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="48e98cbc-623a-404b-a3dd-4f46720da438" containerName="registry-server" Jan 30 
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.876468 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.878401 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-7d225"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.885135 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.886004 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.889276 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-7f7c9"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.890379 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.912327 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.925262 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.935868 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.937881 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-tkf57"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.942827 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.943566 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.947542 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-5nfnj"
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.951961 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.964002 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.979614 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t2mgt"]
Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.984003 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq"]
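
Each "SyncLoop ADD" above is a watch event for a pod newly bound to this node, and the util.go:30 line that follows records the first sync's conclusion: no sandbox exists for the pod yet, so one has to be created before any containers can run. The reflector.go:368 "Caches populated" lines show per-namespace Secret informers being started for the pods' dockercfg pull secrets. A compact, simplified sketch of that first-sync decision (illustrative, not the kubelet's real control flow):

    package main

    import "fmt"

    // podSandbox is a simplified stand-in for what the kubelet learns from
    // the CRI runtime about a pod's sandbox; a pod that was just ADDed from
    // the API server has none on record.
    type podSandbox struct {
        exists bool
    }

    // needsNewSandbox mirrors the decision behind the repeated
    // "No sandbox for pod can be found. Need to start a new one" entries:
    // with no sandbox on record, the first sync must create one before any
    // containers can start. (Simplified; the real check also considers
    // sandbox state, network readiness, and restart policy.)
    func needsNewSandbox(s *podSandbox) bool {
        return s == nil || !s.exists
    }

    func main() {
        fmt.Println(needsNewSandbox(nil)) // true: first sync starts a sandbox
    }
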
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.987109 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-w2wvn" Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.997346 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n"] Jan 30 11:09:32 crc kubenswrapper[4869]: I0130 11:09:32.998224 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.004960 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95nhj\" (UniqueName: \"kubernetes.io/projected/88221c24-f744-4a85-9f3e-cede7b0a4f67-kube-api-access-95nhj\") pod \"barbican-operator-controller-manager-566c8844c5-llwtw\" (UID: \"88221c24-f744-4a85-9f3e-cede7b0a4f67\") " pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.005028 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwv8p\" (UniqueName: \"kubernetes.io/projected/818c8e4f-f344-4544-a562-35fd8865bdb9-kube-api-access-cwv8p\") pod \"glance-operator-controller-manager-784f59d4f4-hgkkg\" (UID: \"818c8e4f-f344-4544-a562-35fd8865bdb9\") " pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.005179 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck4m2\" (UniqueName: \"kubernetes.io/projected/f0d6cb91-dba9-4395-9438-8ab72ea16207-kube-api-access-ck4m2\") pod \"cinder-operator-controller-manager-5f9bbdc844-56ngn\" (UID: \"f0d6cb91-dba9-4395-9438-8ab72ea16207\") " pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.011877 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.017349 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-wsk5t" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.031703 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.048745 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.049535 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.073109 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-6kpbn" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.082779 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.083665 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.086509 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-fxnb7" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.086634 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.094252 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.095162 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.098184 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-74xx8" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.113808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck4m2\" (UniqueName: \"kubernetes.io/projected/f0d6cb91-dba9-4395-9438-8ab72ea16207-kube-api-access-ck4m2\") pod \"cinder-operator-controller-manager-5f9bbdc844-56ngn\" (UID: \"f0d6cb91-dba9-4395-9438-8ab72ea16207\") " pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.113857 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb5pg\" (UniqueName: \"kubernetes.io/projected/4eb7653e-3fa6-4479-a1de-66d77e70b60c-kube-api-access-cb5pg\") pod \"designate-operator-controller-manager-8f4c5cb64-wgddf\" (UID: \"4eb7653e-3fa6-4479-a1de-66d77e70b60c\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.113904 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95nhj\" (UniqueName: \"kubernetes.io/projected/88221c24-f744-4a85-9f3e-cede7b0a4f67-kube-api-access-95nhj\") pod \"barbican-operator-controller-manager-566c8844c5-llwtw\" (UID: \"88221c24-f744-4a85-9f3e-cede7b0a4f67\") " pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.113932 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzctb\" (UniqueName: \"kubernetes.io/projected/68a4d7f9-c03f-4552-8571-344434546d04-kube-api-access-kzctb\") pod \"horizon-operator-controller-manager-5fb775575f-q2f9n\" (UID: \"68a4d7f9-c03f-4552-8571-344434546d04\") " 
pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.114016 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwv8p\" (UniqueName: \"kubernetes.io/projected/818c8e4f-f344-4544-a562-35fd8865bdb9-kube-api-access-cwv8p\") pod \"glance-operator-controller-manager-784f59d4f4-hgkkg\" (UID: \"818c8e4f-f344-4544-a562-35fd8865bdb9\") " pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.114050 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2748\" (UniqueName: \"kubernetes.io/projected/edcec497-d8a5-4cc4-b966-90bda3727925-kube-api-access-b2748\") pod \"heat-operator-controller-manager-54985f5875-6m8mq\" (UID: \"edcec497-d8a5-4cc4-b966-90bda3727925\") " pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.134450 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.139209 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.145739 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.148958 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.152616 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.153298 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95nhj\" (UniqueName: \"kubernetes.io/projected/88221c24-f744-4a85-9f3e-cede7b0a4f67-kube-api-access-95nhj\") pod \"barbican-operator-controller-manager-566c8844c5-llwtw\" (UID: \"88221c24-f744-4a85-9f3e-cede7b0a4f67\") " pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.155529 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwv8p\" (UniqueName: \"kubernetes.io/projected/818c8e4f-f344-4544-a562-35fd8865bdb9-kube-api-access-cwv8p\") pod \"glance-operator-controller-manager-784f59d4f4-hgkkg\" (UID: \"818c8e4f-f344-4544-a562-35fd8865bdb9\") " pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.166463 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-vw4wt" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.173813 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck4m2\" (UniqueName: \"kubernetes.io/projected/f0d6cb91-dba9-4395-9438-8ab72ea16207-kube-api-access-ck4m2\") pod \"cinder-operator-controller-manager-5f9bbdc844-56ngn\" (UID: \"f0d6cb91-dba9-4395-9438-8ab72ea16207\") " pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.176671 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.192847 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.200439 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.201488 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.207202 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-z6c9m" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.208030 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.208642 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.208854 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.213641 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-l8x9j" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.214739 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.214786 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnv57\" (UniqueName: \"kubernetes.io/projected/495b2a0c-3cb3-4a22-8609-eb786d6c693f-kube-api-access-rnv57\") pod \"keystone-operator-controller-manager-6c9d56f9bd-n5pbt\" (UID: \"495b2a0c-3cb3-4a22-8609-eb786d6c693f\") " pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.214840 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb5pg\" (UniqueName: \"kubernetes.io/projected/4eb7653e-3fa6-4479-a1de-66d77e70b60c-kube-api-access-cb5pg\") pod \"designate-operator-controller-manager-8f4c5cb64-wgddf\" (UID: \"4eb7653e-3fa6-4479-a1de-66d77e70b60c\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.214863 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t97gd\" (UniqueName: \"kubernetes.io/projected/e9e042ed-4c1e-430b-bb7e-fda28cbef607-kube-api-access-t97gd\") pod \"ironic-operator-controller-manager-6fd9bbb6f6-ql2pg\" (UID: \"e9e042ed-4c1e-430b-bb7e-fda28cbef607\") " pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.214902 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzctb\" (UniqueName: \"kubernetes.io/projected/68a4d7f9-c03f-4552-8571-344434546d04-kube-api-access-kzctb\") pod \"horizon-operator-controller-manager-5fb775575f-q2f9n\" (UID: \"68a4d7f9-c03f-4552-8571-344434546d04\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.214949 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hghq9\" (UniqueName: \"kubernetes.io/projected/11587507-3c83-42d5-af04-3e352e7c7689-kube-api-access-hghq9\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.215063 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2748\" (UniqueName: \"kubernetes.io/projected/edcec497-d8a5-4cc4-b966-90bda3727925-kube-api-access-b2748\") pod \"heat-operator-controller-manager-54985f5875-6m8mq\" (UID: \"edcec497-d8a5-4cc4-b966-90bda3727925\") " 
pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.261702 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb5pg\" (UniqueName: \"kubernetes.io/projected/4eb7653e-3fa6-4479-a1de-66d77e70b60c-kube-api-access-cb5pg\") pod \"designate-operator-controller-manager-8f4c5cb64-wgddf\" (UID: \"4eb7653e-3fa6-4479-a1de-66d77e70b60c\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.269384 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2748\" (UniqueName: \"kubernetes.io/projected/edcec497-d8a5-4cc4-b966-90bda3727925-kube-api-access-b2748\") pod \"heat-operator-controller-manager-54985f5875-6m8mq\" (UID: \"edcec497-d8a5-4cc4-b966-90bda3727925\") " pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.271246 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzctb\" (UniqueName: \"kubernetes.io/projected/68a4d7f9-c03f-4552-8571-344434546d04-kube-api-access-kzctb\") pod \"horizon-operator-controller-manager-5fb775575f-q2f9n\" (UID: \"68a4d7f9-c03f-4552-8571-344434546d04\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.275885 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.276214 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.316463 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.317107 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.317129 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnv57\" (UniqueName: \"kubernetes.io/projected/495b2a0c-3cb3-4a22-8609-eb786d6c693f-kube-api-access-rnv57\") pod \"keystone-operator-controller-manager-6c9d56f9bd-n5pbt\" (UID: \"495b2a0c-3cb3-4a22-8609-eb786d6c693f\") " pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.317156 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t97gd\" (UniqueName: \"kubernetes.io/projected/e9e042ed-4c1e-430b-bb7e-fda28cbef607-kube-api-access-t97gd\") pod \"ironic-operator-controller-manager-6fd9bbb6f6-ql2pg\" (UID: \"e9e042ed-4c1e-430b-bb7e-fda28cbef607\") " pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.317191 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-xft99\" (UniqueName: \"kubernetes.io/projected/22150fc4-3e93-45fd-9301-f7b552f57f48-kube-api-access-xft99\") pod \"neutron-operator-controller-manager-6cfc4f6754-gcvf8\" (UID: \"22150fc4-3e93-45fd-9301-f7b552f57f48\") " pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.317211 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tg27\" (UniqueName: \"kubernetes.io/projected/484fb11b-1e2c-40c8-944d-a34a6fbaed79-kube-api-access-4tg27\") pod \"mariadb-operator-controller-manager-67bf948998-22kgf\" (UID: \"484fb11b-1e2c-40c8-944d-a34a6fbaed79\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.317249 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hghq9\" (UniqueName: \"kubernetes.io/projected/11587507-3c83-42d5-af04-3e352e7c7689-kube-api-access-hghq9\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.317278 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwg99\" (UniqueName: \"kubernetes.io/projected/0a3ad98c-dec5-417c-890a-227fcab3d149-kube-api-access-mwg99\") pod \"manila-operator-controller-manager-74954f9f78-flsxj\" (UID: \"0a3ad98c-dec5-417c-890a-227fcab3d149\") " pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" Jan 30 11:09:33 crc kubenswrapper[4869]: E0130 11:09:33.317727 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:33 crc kubenswrapper[4869]: E0130 11:09:33.317777 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert podName:11587507-3c83-42d5-af04-3e352e7c7689 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:33.817758166 +0000 UTC m=+924.367634232 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert") pod "infra-operator-controller-manager-79955696d6-ghcvm" (UID: "11587507-3c83-42d5-af04-3e352e7c7689") : secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.326869 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.327158 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.337584 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t97gd\" (UniqueName: \"kubernetes.io/projected/e9e042ed-4c1e-430b-bb7e-fda28cbef607-kube-api-access-t97gd\") pod \"ironic-operator-controller-manager-6fd9bbb6f6-ql2pg\" (UID: \"e9e042ed-4c1e-430b-bb7e-fda28cbef607\") " pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.341181 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.341420 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hghq9\" (UniqueName: \"kubernetes.io/projected/11587507-3c83-42d5-af04-3e352e7c7689-kube-api-access-hghq9\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.342019 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.345424 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnv57\" (UniqueName: \"kubernetes.io/projected/495b2a0c-3cb3-4a22-8609-eb786d6c693f-kube-api-access-rnv57\") pod \"keystone-operator-controller-manager-6c9d56f9bd-n5pbt\" (UID: \"495b2a0c-3cb3-4a22-8609-eb786d6c693f\") " pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.345786 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.353071 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-6wvbp" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.396677 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.397362 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.459874 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.476126 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xft99\" (UniqueName: \"kubernetes.io/projected/22150fc4-3e93-45fd-9301-f7b552f57f48-kube-api-access-xft99\") pod \"neutron-operator-controller-manager-6cfc4f6754-gcvf8\" (UID: \"22150fc4-3e93-45fd-9301-f7b552f57f48\") " pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.476208 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tg27\" (UniqueName: \"kubernetes.io/projected/484fb11b-1e2c-40c8-944d-a34a6fbaed79-kube-api-access-4tg27\") pod \"mariadb-operator-controller-manager-67bf948998-22kgf\" (UID: \"484fb11b-1e2c-40c8-944d-a34a6fbaed79\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.476264 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxr2w\" (UniqueName: \"kubernetes.io/projected/ab8e3667-4a4c-47df-b46c-1d3d5a315fe0-kube-api-access-hxr2w\") pod \"nova-operator-controller-manager-67f5956bc9-vs89l\" (UID: \"ab8e3667-4a4c-47df-b46c-1d3d5a315fe0\") " pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.476384 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwg99\" (UniqueName: \"kubernetes.io/projected/0a3ad98c-dec5-417c-890a-227fcab3d149-kube-api-access-mwg99\") pod \"manila-operator-controller-manager-74954f9f78-flsxj\" (UID: \"0a3ad98c-dec5-417c-890a-227fcab3d149\") " pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.502085 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.502139 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.502256 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.508896 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-9ljfp" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.525864 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.526904 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.538783 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-v9vsm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.547338 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwg99\" (UniqueName: \"kubernetes.io/projected/0a3ad98c-dec5-417c-890a-227fcab3d149-kube-api-access-mwg99\") pod \"manila-operator-controller-manager-74954f9f78-flsxj\" (UID: \"0a3ad98c-dec5-417c-890a-227fcab3d149\") " pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.558375 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tg27\" (UniqueName: \"kubernetes.io/projected/484fb11b-1e2c-40c8-944d-a34a6fbaed79-kube-api-access-4tg27\") pod \"mariadb-operator-controller-manager-67bf948998-22kgf\" (UID: \"484fb11b-1e2c-40c8-944d-a34a6fbaed79\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.561764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xft99\" (UniqueName: \"kubernetes.io/projected/22150fc4-3e93-45fd-9301-f7b552f57f48-kube-api-access-xft99\") pod \"neutron-operator-controller-manager-6cfc4f6754-gcvf8\" (UID: \"22150fc4-3e93-45fd-9301-f7b552f57f48\") " pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.562926 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.577213 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.578048 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.580337 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.581499 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.584412 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.587335 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-js4bg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.588270 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxr2w\" (UniqueName: \"kubernetes.io/projected/ab8e3667-4a4c-47df-b46c-1d3d5a315fe0-kube-api-access-hxr2w\") pod \"nova-operator-controller-manager-67f5956bc9-vs89l\" (UID: \"ab8e3667-4a4c-47df-b46c-1d3d5a315fe0\") " pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.593074 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-djlzf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.611903 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.614521 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxr2w\" (UniqueName: \"kubernetes.io/projected/ab8e3667-4a4c-47df-b46c-1d3d5a315fe0-kube-api-access-hxr2w\") pod \"nova-operator-controller-manager-67f5956bc9-vs89l\" (UID: \"ab8e3667-4a4c-47df-b46c-1d3d5a315fe0\") " pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.644423 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.651875 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.689393 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.690312 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qkf5\" (UniqueName: \"kubernetes.io/projected/61332cae-942e-475a-85b9-2020908d8266-kube-api-access-5qkf5\") pod \"placement-operator-controller-manager-5b964cf4cd-tk8kg\" (UID: \"61332cae-942e-475a-85b9-2020908d8266\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.690344 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqrhn\" (UniqueName: \"kubernetes.io/projected/28c859ed-db94-494c-afa3-c1cb96425ac5-kube-api-access-qqrhn\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.690371 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.690438 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4lxq\" (UniqueName: \"kubernetes.io/projected/757e291c-f7c9-4b61-9ed8-5e78c4ffe989-kube-api-access-f4lxq\") pod \"octavia-operator-controller-manager-694c6dcf95-m4d9d\" (UID: \"757e291c-f7c9-4b61-9ed8-5e78c4ffe989\") " pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.690474 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj6d5\" (UniqueName: \"kubernetes.io/projected/50843920-ef36-4230-8840-0d34b70f602b-kube-api-access-sj6d5\") pod \"ovn-operator-controller-manager-788c46999f-lvrcp\" (UID: \"50843920-ef36-4230-8840-0d34b70f602b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.699243 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.709546 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.710471 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.713996 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-4jf8z" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.738946 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.757917 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.758792 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.766173 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-28bj6" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.768787 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.783859 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.784880 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.787649 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.792634 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4lxq\" (UniqueName: \"kubernetes.io/projected/757e291c-f7c9-4b61-9ed8-5e78c4ffe989-kube-api-access-f4lxq\") pod \"octavia-operator-controller-manager-694c6dcf95-m4d9d\" (UID: \"757e291c-f7c9-4b61-9ed8-5e78c4ffe989\") " pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.792741 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj6d5\" (UniqueName: \"kubernetes.io/projected/50843920-ef36-4230-8840-0d34b70f602b-kube-api-access-sj6d5\") pod \"ovn-operator-controller-manager-788c46999f-lvrcp\" (UID: \"50843920-ef36-4230-8840-0d34b70f602b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.792799 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qkf5\" (UniqueName: \"kubernetes.io/projected/61332cae-942e-475a-85b9-2020908d8266-kube-api-access-5qkf5\") pod \"placement-operator-controller-manager-5b964cf4cd-tk8kg\" (UID: \"61332cae-942e-475a-85b9-2020908d8266\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.792836 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqrhn\" (UniqueName: 
\"kubernetes.io/projected/28c859ed-db94-494c-afa3-c1cb96425ac5-kube-api-access-qqrhn\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.792868 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.792870 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-cd8dh" Jan 30 11:09:33 crc kubenswrapper[4869]: E0130 11:09:33.793261 4869 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 11:09:33 crc kubenswrapper[4869]: E0130 11:09:33.793322 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert podName:28c859ed-db94-494c-afa3-c1cb96425ac5 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:34.293304497 +0000 UTC m=+924.843180563 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" (UID: "28c859ed-db94-494c-afa3-c1cb96425ac5") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.807999 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.808993 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.812517 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.817524 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqrhn\" (UniqueName: \"kubernetes.io/projected/28c859ed-db94-494c-afa3-c1cb96425ac5-kube-api-access-qqrhn\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.832329 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-lww2t" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.835790 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.837983 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.838979 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.839790 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qkf5\" (UniqueName: \"kubernetes.io/projected/61332cae-942e-475a-85b9-2020908d8266-kube-api-access-5qkf5\") pod \"placement-operator-controller-manager-5b964cf4cd-tk8kg\" (UID: \"61332cae-942e-475a-85b9-2020908d8266\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.846999 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.847174 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-chjhb" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.847237 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4lxq\" (UniqueName: \"kubernetes.io/projected/757e291c-f7c9-4b61-9ed8-5e78c4ffe989-kube-api-access-f4lxq\") pod \"octavia-operator-controller-manager-694c6dcf95-m4d9d\" (UID: \"757e291c-f7c9-4b61-9ed8-5e78c4ffe989\") " pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.847300 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.847498 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.866542 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj6d5\" (UniqueName: \"kubernetes.io/projected/50843920-ef36-4230-8840-0d34b70f602b-kube-api-access-sj6d5\") pod \"ovn-operator-controller-manager-788c46999f-lvrcp\" (UID: \"50843920-ef36-4230-8840-0d34b70f602b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.872798 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.879204 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.894218 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84vrp\" (UniqueName: \"kubernetes.io/projected/6e096fd6-f27e-4561-a86b-8991d11d82e7-kube-api-access-84vrp\") pod \"swift-operator-controller-manager-7d4f9d9c9b-8hsst\" (UID: \"6e096fd6-f27e-4561-a86b-8991d11d82e7\") " pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.894313 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.894361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n224x\" (UniqueName: \"kubernetes.io/projected/79e9a1e6-68d5-422a-9446-0d4f106f5f22-kube-api-access-n224x\") pod \"test-operator-controller-manager-56f8bfcd9f-28mvf\" (UID: \"79e9a1e6-68d5-422a-9446-0d4f106f5f22\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.894382 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmjvp\" (UniqueName: \"kubernetes.io/projected/64399a12-4f5e-42e8-bc4b-b80347287a30-kube-api-access-kmjvp\") pod \"telemetry-operator-controller-manager-76cd99594-rjwgj\" (UID: \"64399a12-4f5e-42e8-bc4b-b80347287a30\") " pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" Jan 30 11:09:33 crc kubenswrapper[4869]: E0130 11:09:33.894543 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:33 crc kubenswrapper[4869]: E0130 11:09:33.894594 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert podName:11587507-3c83-42d5-af04-3e352e7c7689 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:34.894575304 +0000 UTC m=+925.444451360 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert") pod "infra-operator-controller-manager-79955696d6-ghcvm" (UID: "11587507-3c83-42d5-af04-3e352e7c7689") : secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.909784 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"] Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.911091 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"
Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.927213 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-xjjws"
Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.931081 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg"
Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.938920 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"]
Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.976841 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw"]
Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.996567 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84vrp\" (UniqueName: \"kubernetes.io/projected/6e096fd6-f27e-4561-a86b-8991d11d82e7-kube-api-access-84vrp\") pod \"swift-operator-controller-manager-7d4f9d9c9b-8hsst\" (UID: \"6e096fd6-f27e-4561-a86b-8991d11d82e7\") " pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst"
Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.996646 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2h45\" (UniqueName: \"kubernetes.io/projected/fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29-kube-api-access-k2h45\") pod \"watcher-operator-controller-manager-5bf648c946-89m4w\" (UID: \"fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29\") " pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w"
Jan 30 11:09:33 crc kubenswrapper[4869]: I0130 11:09:33.996680 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.003608 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n224x\" (UniqueName: \"kubernetes.io/projected/79e9a1e6-68d5-422a-9446-0d4f106f5f22-kube-api-access-n224x\") pod \"test-operator-controller-manager-56f8bfcd9f-28mvf\" (UID: \"79e9a1e6-68d5-422a-9446-0d4f106f5f22\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.003687 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmjvp\" (UniqueName: \"kubernetes.io/projected/64399a12-4f5e-42e8-bc4b-b80347287a30-kube-api-access-kmjvp\") pod \"telemetry-operator-controller-manager-76cd99594-rjwgj\" (UID: \"64399a12-4f5e-42e8-bc4b-b80347287a30\") " pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.003873 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.003907 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwvm5\" (UniqueName: \"kubernetes.io/projected/e5597fb4-0c80-4868-ae07-b38449e7a4af-kube-api-access-gwvm5\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.164932 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84vrp\" (UniqueName: \"kubernetes.io/projected/6e096fd6-f27e-4561-a86b-8991d11d82e7-kube-api-access-84vrp\") pod \"swift-operator-controller-manager-7d4f9d9c9b-8hsst\" (UID: \"6e096fd6-f27e-4561-a86b-8991d11d82e7\") " pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.173303 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmjvp\" (UniqueName: \"kubernetes.io/projected/64399a12-4f5e-42e8-bc4b-b80347287a30-kube-api-access-kmjvp\") pod \"telemetry-operator-controller-manager-76cd99594-rjwgj\" (UID: \"64399a12-4f5e-42e8-bc4b-b80347287a30\") " pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.175276 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n224x\" (UniqueName: \"kubernetes.io/projected/79e9a1e6-68d5-422a-9446-0d4f106f5f22-kube-api-access-n224x\") pod \"test-operator-controller-manager-56f8bfcd9f-28mvf\" (UID: \"79e9a1e6-68d5-422a-9446-0d4f106f5f22\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.176621 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2h45\" (UniqueName: \"kubernetes.io/projected/fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29-kube-api-access-k2h45\") pod \"watcher-operator-controller-manager-5bf648c946-89m4w\" (UID: \"fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29\") " pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.176681 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.176805 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btc29\" (UniqueName: \"kubernetes.io/projected/1361000f-8ad7-4e93-b7cc-c059e5ba6641-kube-api-access-btc29\") pod \"rabbitmq-cluster-operator-manager-668c99d594-bxjxm\" (UID: \"1361000f-8ad7-4e93-b7cc-c059e5ba6641\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.176934 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.176965 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwvm5\" (UniqueName: \"kubernetes.io/projected/e5597fb4-0c80-4868-ae07-b38449e7a4af-kube-api-access-gwvm5\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.177528 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.177668 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:34.677649267 +0000 UTC m=+925.227525333 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.178024 4869 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.178136 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:34.67811928 +0000 UTC m=+925.227995336 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "metrics-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.217578 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.232450 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwvm5\" (UniqueName: \"kubernetes.io/projected/e5597fb4-0c80-4868-ae07-b38449e7a4af-kube-api-access-gwvm5\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.245329 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2h45\" (UniqueName: \"kubernetes.io/projected/fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29-kube-api-access-k2h45\") pod \"watcher-operator-controller-manager-5bf648c946-89m4w\" (UID: \"fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29\") " pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.254368 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t2mgt" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="registry-server" containerID="cri-o://792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847" gracePeriod=2
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.279686 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btc29\" (UniqueName: \"kubernetes.io/projected/1361000f-8ad7-4e93-b7cc-c059e5ba6641-kube-api-access-btc29\") pod \"rabbitmq-cluster-operator-manager-668c99d594-bxjxm\" (UID: \"1361000f-8ad7-4e93-b7cc-c059e5ba6641\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.310683 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" event={"ID":"88221c24-f744-4a85-9f3e-cede7b0a4f67","Type":"ContainerStarted","Data":"3600a799a3d065be169e1a34917e59dcfa7d56c4e922d1df2e86ae45e285d9f1"}
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.325466 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.331988 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btc29\" (UniqueName: \"kubernetes.io/projected/1361000f-8ad7-4e93-b7cc-c059e5ba6641-kube-api-access-btc29\") pod \"rabbitmq-cluster-operator-manager-668c99d594-bxjxm\" (UID: \"1361000f-8ad7-4e93-b7cc-c059e5ba6641\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.351299 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.381835 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j"
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.382018 4869 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.382066 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert podName:28c859ed-db94-494c-afa3-c1cb96425ac5 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:35.382052534 +0000 UTC m=+925.931928600 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" (UID: "28c859ed-db94-494c-afa3-c1cb96425ac5") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.407030 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn"]
Jan 30 11:09:34 crc kubenswrapper[4869]: W0130 11:09:34.509621 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0d6cb91_dba9_4395_9438_8ab72ea16207.slice/crio-677caa9a5694a1fa1b4da17a26e92b78a4c9005015bc155029ffcfac7c33d84e WatchSource:0}: Error finding container 677caa9a5694a1fa1b4da17a26e92b78a4c9005015bc155029ffcfac7c33d84e: Status 404 returned error can't find the container with id 677caa9a5694a1fa1b4da17a26e92b78a4c9005015bc155029ffcfac7c33d84e
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.559395 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n"]
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.579305 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.650194 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.689204 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.689324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.689398 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.689480 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:35.689457008 +0000 UTC m=+926.239333154 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.689893 4869 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.689926 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:35.689918481 +0000 UTC m=+926.239794547 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "metrics-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.866898 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf"]
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.894634 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm"
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.894827 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: E0130 11:09:34.894877 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert podName:11587507-3c83-42d5-af04-3e352e7c7689 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:36.894859414 +0000 UTC m=+927.444735480 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert") pod "infra-operator-controller-manager-79955696d6-ghcvm" (UID: "11587507-3c83-42d5-af04-3e352e7c7689") : secret "infra-operator-webhook-server-cert" not found
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.895460 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg"]
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.909546 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg"]
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.967193 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq"]
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.974296 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t2mgt"
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.978084 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt"]
Jan 30 11:09:34 crc kubenswrapper[4869]: W0130 11:09:34.979180 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedcec497_d8a5_4cc4_b966_90bda3727925.slice/crio-4650ab50e0dc671728504903a1f31fe5041febd6f4a4d1696dae42a4e9b4bd05 WatchSource:0}: Error finding container 4650ab50e0dc671728504903a1f31fe5041febd6f4a4d1696dae42a4e9b4bd05: Status 404 returned error can't find the container with id 4650ab50e0dc671728504903a1f31fe5041febd6f4a4d1696dae42a4e9b4bd05
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.997077 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-utilities\") pod \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") "
Jan 30 11:09:34 crc kubenswrapper[4869]: I0130 11:09:34.998077 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-catalog-content\") pod \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") "
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:34.998170 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-utilities" (OuterVolumeSpecName: "utilities") pod "53519751-f7c0-4f7c-8a3e-eea53b107ac5" (UID: "53519751-f7c0-4f7c-8a3e-eea53b107ac5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:34.998242 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qxsb\" (UniqueName: \"kubernetes.io/projected/53519751-f7c0-4f7c-8a3e-eea53b107ac5-kube-api-access-4qxsb\") pod \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\" (UID: \"53519751-f7c0-4f7c-8a3e-eea53b107ac5\") "
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.001029 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.003977 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53519751-f7c0-4f7c-8a3e-eea53b107ac5-kube-api-access-4qxsb" (OuterVolumeSpecName: "kube-api-access-4qxsb") pod "53519751-f7c0-4f7c-8a3e-eea53b107ac5" (UID: "53519751-f7c0-4f7c-8a3e-eea53b107ac5"). InnerVolumeSpecName "kube-api-access-4qxsb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.116658 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qxsb\" (UniqueName: \"kubernetes.io/projected/53519751-f7c0-4f7c-8a3e-eea53b107ac5-kube-api-access-4qxsb\") on node \"crc\" DevicePath \"\""
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.261571 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" event={"ID":"edcec497-d8a5-4cc4-b966-90bda3727925","Type":"ContainerStarted","Data":"4650ab50e0dc671728504903a1f31fe5041febd6f4a4d1696dae42a4e9b4bd05"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.264676 4869 generic.go:334] "Generic (PLEG): container finished" podID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerID="792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847" exitCode=0
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.264776 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t2mgt" event={"ID":"53519751-f7c0-4f7c-8a3e-eea53b107ac5","Type":"ContainerDied","Data":"792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.264795 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t2mgt" event={"ID":"53519751-f7c0-4f7c-8a3e-eea53b107ac5","Type":"ContainerDied","Data":"7d3fe0c3de9a568070c622317ee96bc5604b36daaae71e58fcb0ad01f7935fad"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.264810 4869 scope.go:117] "RemoveContainer" containerID="792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.264906 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t2mgt"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.266523 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" event={"ID":"495b2a0c-3cb3-4a22-8609-eb786d6c693f","Type":"ContainerStarted","Data":"78bd39a9e4263d6e6f68ce830569a9e3a5ec2585ccc63c83dfafd350e0727017"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.268322 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" event={"ID":"68a4d7f9-c03f-4552-8571-344434546d04","Type":"ContainerStarted","Data":"e141c3111a081e5a465294460133e067c4a6ee0e3514cb6b3d3c7197568a348f"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.269614 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" event={"ID":"818c8e4f-f344-4544-a562-35fd8865bdb9","Type":"ContainerStarted","Data":"d5df1a8b577a57c703a162ca9df11f181142a99eecf413705c672143ad122120"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.271005 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" event={"ID":"f0d6cb91-dba9-4395-9438-8ab72ea16207","Type":"ContainerStarted","Data":"677caa9a5694a1fa1b4da17a26e92b78a4c9005015bc155029ffcfac7c33d84e"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.272734 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" event={"ID":"e9e042ed-4c1e-430b-bb7e-fda28cbef607","Type":"ContainerStarted","Data":"c59b38c63f54907b4013bcc6e45b7142986013380ed4ede1362020a49471b7fb"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.273750 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" event={"ID":"4eb7653e-3fa6-4479-a1de-66d77e70b60c","Type":"ContainerStarted","Data":"bb876b63d5b0beb216b697668b00557ba590b93f750467b360d9b4732f08dd01"}
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.288818 4869 scope.go:117] "RemoveContainer" containerID="55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.356305 4869 scope.go:117] "RemoveContainer" containerID="0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.385374 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.390886 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.399864 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.407567 4869 scope.go:117] "RemoveContainer" containerID="792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.415597 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847\": container with ID starting with 792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847 not found: ID does not exist" containerID="792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.415636 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847"} err="failed to get container status \"792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847\": rpc error: code = NotFound desc = could not find container \"792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847\": container with ID starting with 792e9406ea138473af74dbe1003420fac7362a786ccda7bd1e92741d70007847 not found: ID does not exist"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.415663 4869 scope.go:117] "RemoveContainer" containerID="55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.416407 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072\": container with ID starting with 55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072 not found: ID does not exist" containerID="55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.416455 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072"} err="failed to get container status \"55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072\": rpc error: code = NotFound desc = could not find container \"55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072\": container with ID starting with 55ffe1c93383c98324767eaaefcac338619832d42d0924387e0a53cd4b776072 not found: ID does not exist"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.416485 4869 scope.go:117] "RemoveContainer" containerID="0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.416906 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d\": container with ID starting with 0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d not found: ID does not exist" containerID="0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.416929 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d"} err="failed to get container status \"0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d\": rpc error: code = NotFound desc = could not find container \"0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d\": container with ID starting with 0b43e6242cbd9907eed84e82242bc3eafeee9fce40a9a6876941d7b884a3f80d not found: ID does not exist"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.432019 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.441854 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.444936 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.445096 4869 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.445165 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert podName:28c859ed-db94-494c-afa3-c1cb96425ac5 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:37.445146459 +0000 UTC m=+927.995022525 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" (UID: "28c859ed-db94-494c-afa3-c1cb96425ac5") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.452538 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.466096 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.476183 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.486861 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj"]
Jan 30 11:09:35 crc kubenswrapper[4869]: W0130 11:09:35.487930 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e096fd6_f27e_4561_a86b_8991d11d82e7.slice/crio-fb274d53c3fea79b56e0a5f58a5d1f3fdc4f0d5b96162602f3f2d78848bbde9a WatchSource:0}: Error finding container fb274d53c3fea79b56e0a5f58a5d1f3fdc4f0d5b96162602f3f2d78848bbde9a: Status 404 returned error can't find the container with id fb274d53c3fea79b56e0a5f58a5d1f3fdc4f0d5b96162602f3f2d78848bbde9a
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.493856 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf"]
Jan 30 11:09:35 crc kubenswrapper[4869]: W0130 11:09:35.495849 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61332cae_942e_475a_85b9_2020908d8266.slice/crio-5b42ca299ed4464acb43f6c5530323852486d3c840a7701f7b912c2be2e10883 WatchSource:0}: Error finding container 5b42ca299ed4464acb43f6c5530323852486d3c840a7701f7b912c2be2e10883: Status 404 returned error can't find the container with id 5b42ca299ed4464acb43f6c5530323852486d3c840a7701f7b912c2be2e10883
Jan 30 11:09:35 crc kubenswrapper[4869]: W0130 11:09:35.500678 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22150fc4_3e93_45fd_9301_f7b552f57f48.slice/crio-782f92be1ca367c6f01ff0ea0e73c3409e24328df283f2754aeabf5cfcf6c39d WatchSource:0}: Error finding container 782f92be1ca367c6f01ff0ea0e73c3409e24328df283f2754aeabf5cfcf6c39d: Status 404 returned error can't find the container with id 782f92be1ca367c6f01ff0ea0e73c3409e24328df283f2754aeabf5cfcf6c39d
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.502368 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w"]
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.508634 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm"]
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.508809 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/neutron-operator@sha256:24a7033dccd09885beebba692a7951d5388284a36f285a97607971c10113354e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xft99,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-6cfc4f6754-gcvf8_openstack-operators(22150fc4-3e93-45fd-9301-f7b552f57f48): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.510081 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" podUID="22150fc4-3e93-45fd-9301-f7b552f57f48"
Jan 30 11:09:35 crc kubenswrapper[4869]: W0130 11:09:35.512869 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfffb7d27_e4fa_4ba1_8a0b_cc6e5d18bd29.slice/crio-bffb9a50d3d172972577658b82e37097d65c595a77b86832ee9820342e433718 WatchSource:0}: Error finding container bffb9a50d3d172972577658b82e37097d65c595a77b86832ee9820342e433718: Status 404 returned error can't find the container with id bffb9a50d3d172972577658b82e37097d65c595a77b86832ee9820342e433718
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.516397 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n224x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-28mvf_openstack-operators(79e9a1e6-68d5-422a-9446-0d4f106f5f22): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.517705 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" podUID="79e9a1e6-68d5-422a-9446-0d4f106f5f22"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.525774 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/watcher-operator@sha256:8049d4d17f301838dfbc3740629d57f9b29c08e779affbf96c4197dc4d1fe19b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k2h45,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5bf648c946-89m4w_openstack-operators(fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.527822 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" podUID="fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29"
Jan 30 11:09:35 crc kubenswrapper[4869]: W0130 11:09:35.535999 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64399a12_4f5e_42e8_bc4b_b80347287a30.slice/crio-9da977d4bb62cee7882d9f79465585e4f414579bde99b7da05646eee7a47e7e6 WatchSource:0}: Error finding container 9da977d4bb62cee7882d9f79465585e4f414579bde99b7da05646eee7a47e7e6: Status 404 returned error can't find the container with id 9da977d4bb62cee7882d9f79465585e4f414579bde99b7da05646eee7a47e7e6
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.542219 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/telemetry-operator@sha256:7316ef2da8e4d8df06b150058249eaed2aa4719491716a4422a8ee5d6a0c352f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kmjvp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cd99594-rjwgj_openstack-operators(64399a12-4f5e-42e8-bc4b-b80347287a30): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.544021 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" podUID="64399a12-4f5e-42e8-bc4b-b80347287a30"
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.749048 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.749269 4869 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.749322 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:37.74930826 +0000 UTC m=+928.299184326 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "metrics-server-cert" not found
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.749636 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.749749 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 30 11:09:35 crc kubenswrapper[4869]: E0130 11:09:35.749777 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:37.749770023 +0000 UTC m=+928.299646089 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "webhook-server-cert" not found
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.947184 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53519751-f7c0-4f7c-8a3e-eea53b107ac5" (UID: "53519751-f7c0-4f7c-8a3e-eea53b107ac5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:09:35 crc kubenswrapper[4869]: I0130 11:09:35.953024 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53519751-f7c0-4f7c-8a3e-eea53b107ac5-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.237053 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t2mgt"]
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.245621 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t2mgt"]
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.283372 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst" event={"ID":"6e096fd6-f27e-4561-a86b-8991d11d82e7","Type":"ContainerStarted","Data":"fb274d53c3fea79b56e0a5f58a5d1f3fdc4f0d5b96162602f3f2d78848bbde9a"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.285231 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" event={"ID":"484fb11b-1e2c-40c8-944d-a34a6fbaed79","Type":"ContainerStarted","Data":"6d90144410fa8999b5e4e451cc607b52e8df498bd66d56236b5997ddba35b5d1"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.286726 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" event={"ID":"61332cae-942e-475a-85b9-2020908d8266","Type":"ContainerStarted","Data":"5b42ca299ed4464acb43f6c5530323852486d3c840a7701f7b912c2be2e10883"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.288097 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" event={"ID":"757e291c-f7c9-4b61-9ed8-5e78c4ffe989","Type":"ContainerStarted","Data":"8ce199b3ab6ca73c3fdc5b29c27ce7baba46e95e7cbbcab7c4e7b86c3e28ad5e"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.292525 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm" event={"ID":"1361000f-8ad7-4e93-b7cc-c059e5ba6641","Type":"ContainerStarted","Data":"21fe19e6c1d520245b913a11f015188098d6a1b41ff67572b58574b75208d663"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.296098 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" event={"ID":"0a3ad98c-dec5-417c-890a-227fcab3d149","Type":"ContainerStarted","Data":"2a88b9a1a792b6f431921f05d612e9c7f0b459fe899f8da56e0768811b1f5324"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.298073 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" event={"ID":"22150fc4-3e93-45fd-9301-f7b552f57f48","Type":"ContainerStarted","Data":"782f92be1ca367c6f01ff0ea0e73c3409e24328df283f2754aeabf5cfcf6c39d"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.299418 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" event={"ID":"fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29","Type":"ContainerStarted","Data":"bffb9a50d3d172972577658b82e37097d65c595a77b86832ee9820342e433718"}
Jan 30 11:09:36 crc kubenswrapper[4869]: E0130 11:09:36.300071 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/neutron-operator@sha256:24a7033dccd09885beebba692a7951d5388284a36f285a97607971c10113354e\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" podUID="22150fc4-3e93-45fd-9301-f7b552f57f48"
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.300356 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" event={"ID":"50843920-ef36-4230-8840-0d34b70f602b","Type":"ContainerStarted","Data":"9901deba58ade6e81666a5b7045eb10df4ea7be5077a3e0bba1abc1b9c3d103b"}
Jan 30 11:09:36 crc kubenswrapper[4869]: E0130 11:09:36.301244 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/watcher-operator@sha256:8049d4d17f301838dfbc3740629d57f9b29c08e779affbf96c4197dc4d1fe19b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" podUID="fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29"
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.302157 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" event={"ID":"79e9a1e6-68d5-422a-9446-0d4f106f5f22","Type":"ContainerStarted","Data":"1e0a0ecbcbedabc450491309c3ce8bca94fafaca1b95e496751e2acf65d1e631"}
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.304619 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" event={"ID":"64399a12-4f5e-42e8-bc4b-b80347287a30","Type":"ContainerStarted","Data":"9da977d4bb62cee7882d9f79465585e4f414579bde99b7da05646eee7a47e7e6"}
Jan 30 11:09:36 crc kubenswrapper[4869]: E0130 11:09:36.304870 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" podUID="79e9a1e6-68d5-422a-9446-0d4f106f5f22"
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.305531 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" event={"ID":"ab8e3667-4a4c-47df-b46c-1d3d5a315fe0","Type":"ContainerStarted","Data":"a91d56ce9ba1f9c99eb94cc171a05def5f5775e2d2f97684fa01e3151e34fb13"}
Jan 30 11:09:36 crc kubenswrapper[4869]: E0130 11:09:36.306298 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/telemetry-operator@sha256:7316ef2da8e4d8df06b150058249eaed2aa4719491716a4422a8ee5d6a0c352f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" podUID="64399a12-4f5e-42e8-bc4b-b80347287a30"
Jan 30 11:09:36 crc kubenswrapper[4869]: I0130 11:09:36.975478 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm"
Jan 30 11:09:36 crc kubenswrapper[4869]: E0130 11:09:36.975677 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Jan 30 11:09:36 crc kubenswrapper[4869]: E0130 11:09:36.976110 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert podName:11587507-3c83-42d5-af04-3e352e7c7689 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:40.976087674 +0000 UTC m=+931.525963740 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert") pod "infra-operator-controller-manager-79955696d6-ghcvm" (UID: "11587507-3c83-42d5-af04-3e352e7c7689") : secret "infra-operator-webhook-server-cert" not found
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.332231 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/neutron-operator@sha256:24a7033dccd09885beebba692a7951d5388284a36f285a97607971c10113354e\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" podUID="22150fc4-3e93-45fd-9301-f7b552f57f48"
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.332294 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/telemetry-operator@sha256:7316ef2da8e4d8df06b150058249eaed2aa4719491716a4422a8ee5d6a0c352f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" podUID="64399a12-4f5e-42e8-bc4b-b80347287a30"
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.332291 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" podUID="79e9a1e6-68d5-422a-9446-0d4f106f5f22"
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.332373 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/watcher-operator@sha256:8049d4d17f301838dfbc3740629d57f9b29c08e779affbf96c4197dc4d1fe19b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" podUID="fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29"
Jan 30 11:09:37 crc kubenswrapper[4869]: I0130 11:09:37.487132 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j"
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.487551 4869 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.487610 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert podName:28c859ed-db94-494c-afa3-c1cb96425ac5 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:41.487596137 +0000 UTC m=+932.037472203 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" (UID: "28c859ed-db94-494c-afa3-c1cb96425ac5") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 30 11:09:37 crc kubenswrapper[4869]: I0130 11:09:37.791629 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:37 crc kubenswrapper[4869]: I0130 11:09:37.791745 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.791924 4869 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.791983 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:41.791965684 +0000 UTC m=+932.341841750 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "metrics-server-cert" not found
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.792361 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 30 11:09:37 crc kubenswrapper[4869]: E0130 11:09:37.792401 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:41.792388686 +0000 UTC m=+932.342264752 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "webhook-server-cert" not found
Jan 30 11:09:38 crc kubenswrapper[4869]: I0130 11:09:38.152926 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" path="/var/lib/kubelet/pods/53519751-f7c0-4f7c-8a3e-eea53b107ac5/volumes"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.366569 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-shvsz"]
Jan 30 11:09:39 crc kubenswrapper[4869]: E0130 11:09:39.368872 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="registry-server"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.368959 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="registry-server"
Jan 30 11:09:39 crc kubenswrapper[4869]: E0130 11:09:39.369022 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="extract-utilities"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.369074 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="extract-utilities"
Jan 30 11:09:39 crc kubenswrapper[4869]: E0130 11:09:39.369143 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="extract-content"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.369199 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="extract-content"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.369400 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="53519751-f7c0-4f7c-8a3e-eea53b107ac5" containerName="registry-server"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.373666 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.378044 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-shvsz"]
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.418753 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmvv6\" (UniqueName: \"kubernetes.io/projected/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-kube-api-access-dmvv6\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.418828 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-utilities\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.419050 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-catalog-content\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.520816 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmvv6\" (UniqueName: \"kubernetes.io/projected/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-kube-api-access-dmvv6\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.520894 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-utilities\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.521022 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-catalog-content\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.521612 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-catalog-content\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.522249 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-utilities\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz"
Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.543181 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dmvv6\" (UniqueName: \"kubernetes.io/projected/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-kube-api-access-dmvv6\") pod \"certified-operators-shvsz\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:09:39 crc kubenswrapper[4869]: I0130 11:09:39.703955 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:09:41 crc kubenswrapper[4869]: I0130 11:09:41.042204 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.042388 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.042662 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert podName:11587507-3c83-42d5-af04-3e352e7c7689 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:49.04264658 +0000 UTC m=+939.592522646 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert") pod "infra-operator-controller-manager-79955696d6-ghcvm" (UID: "11587507-3c83-42d5-af04-3e352e7c7689") : secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:41 crc kubenswrapper[4869]: I0130 11:09:41.550862 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.551063 4869 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.551151 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert podName:28c859ed-db94-494c-afa3-c1cb96425ac5 nodeName:}" failed. No retries permitted until 2026-01-30 11:09:49.551125767 +0000 UTC m=+940.101001833 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" (UID: "28c859ed-db94-494c-afa3-c1cb96425ac5") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 11:09:41 crc kubenswrapper[4869]: I0130 11:09:41.857788 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:09:41 crc kubenswrapper[4869]: I0130 11:09:41.858270 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.858535 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.858668 4869 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.858676 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:49.858657094 +0000 UTC m=+940.408533160 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "webhook-server-cert" not found Jan 30 11:09:41 crc kubenswrapper[4869]: E0130 11:09:41.858807 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:09:49.858783408 +0000 UTC m=+940.408659474 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "metrics-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: I0130 11:09:49.057734 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.057920 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.058531 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert podName:11587507-3c83-42d5-af04-3e352e7c7689 nodeName:}" failed. No retries permitted until 2026-01-30 11:10:05.058508221 +0000 UTC m=+955.608384367 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert") pod "infra-operator-controller-manager-79955696d6-ghcvm" (UID: "11587507-3c83-42d5-af04-3e352e7c7689") : secret "infra-operator-webhook-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: I0130 11:09:49.566257 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.566459 4869 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.566542 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert podName:28c859ed-db94-494c-afa3-c1cb96425ac5 nodeName:}" failed. No retries permitted until 2026-01-30 11:10:05.566520144 +0000 UTC m=+956.116396220 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" (UID: "28c859ed-db94-494c-afa3-c1cb96425ac5") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: I0130 11:09:49.870605 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:09:49 crc kubenswrapper[4869]: I0130 11:09:49.871013 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.870838 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.871160 4869 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.871163 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:10:05.871136819 +0000 UTC m=+956.421012895 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "webhook-server-cert" not found Jan 30 11:09:49 crc kubenswrapper[4869]: E0130 11:09:49.871249 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs podName:e5597fb4-0c80-4868-ae07-b38449e7a4af nodeName:}" failed. No retries permitted until 2026-01-30 11:10:05.871226902 +0000 UTC m=+956.421103058 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs") pod "openstack-operator-controller-manager-5f57b98d7d-srxcx" (UID: "e5597fb4-0c80-4868-ae07-b38449e7a4af") : secret "metrics-server-cert" not found Jan 30 11:09:52 crc kubenswrapper[4869]: E0130 11:09:52.575255 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/heat-operator@sha256:9f790ab2e5cc7137dd72c7b6232acb6c6646e421c597fa14c2389e8d76ff6f27" Jan 30 11:09:52 crc kubenswrapper[4869]: E0130 11:09:52.575461 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/heat-operator@sha256:9f790ab2e5cc7137dd72c7b6232acb6c6646e421c597fa14c2389e8d76ff6f27,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b2748,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-54985f5875-6m8mq_openstack-operators(edcec497-d8a5-4cc4-b966-90bda3727925): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 11:09:52 crc kubenswrapper[4869]: E0130 11:09:52.576768 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" podUID="edcec497-d8a5-4cc4-b966-90bda3727925" 
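[Editor's note] The cluster of failures above follows a single pattern: MountVolume.SetUp for the "cert", "webhook-certs", and "metrics-certs" volumes fails because the referenced Secrets (infra-operator-webhook-server-cert, openstack-baremetal-operator-webhook-server-cert, webhook-server-cert, metrics-server-cert) do not exist yet, and the kubelet requeues each mount with a durationBeforeRetry that doubles on every consecutive failure: 4s at 11:09:36-37, 8s at 11:09:41, 16s at 11:09:49. Later in this log (11:10:05) the secrets appear and the same mounts succeed on the next scheduled retry. The sketch below reproduces that doubling schedule; it is an illustration only, assuming a 4s initial delay and a hypothetical 2m cap, not the kubelet's actual nestedpendingoperations.go implementation.

    package main

    import (
        "fmt"
        "time"
    )

    // nextRetryDelay mimics the doubling durationBeforeRetry visible in the
    // log above (4s -> 8s -> 16s). Editor's sketch only: the initial delay
    // and the cap are assumptions, not values taken from kubelet source.
    func nextRetryDelay(prev, initial, maxDelay time.Duration) time.Duration {
        if prev <= 0 {
            return initial // first failure starts the backoff window
        }
        next := 2 * prev // double after each consecutive failure
        if next > maxDelay {
            next = maxDelay // clamp so the retry interval stays bounded
        }
        return next
    }

    func main() {
        var d time.Duration
        for attempt := 1; attempt <= 5; attempt++ {
            d = nextRetryDelay(d, 4*time.Second, 2*time.Minute)
            fmt.Printf("attempt %d: durationBeforeRetry %v\n", attempt, d)
        }
        // Prints 4s, 8s, 16s, 32s, 1m4s: the first three match the retries
        // logged above before the missing secrets were finally created.
    }

The ErrImagePull / ImagePullBackOff entries for the heat, ovn, placement, manila, and rabbitmq-cluster operator images are the same idea on the image-pull path (kuberuntime_manager dumps the full Container spec on each unhandled start error, which is why those entries are so long); pulls that do complete surface later as ContainerStarted events with podStartSLOduration records.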
Jan 30 11:09:53 crc kubenswrapper[4869]: E0130 11:09:53.459322 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/heat-operator@sha256:9f790ab2e5cc7137dd72c7b6232acb6c6646e421c597fa14c2389e8d76ff6f27\\\"\"" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" podUID="edcec497-d8a5-4cc4-b966-90bda3727925" Jan 30 11:09:53 crc kubenswrapper[4869]: E0130 11:09:53.961291 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4" Jan 30 11:09:53 crc kubenswrapper[4869]: E0130 11:09:53.961467 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sj6d5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-788c46999f-lvrcp_openstack-operators(50843920-ef36-4230-8840-0d34b70f602b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 11:09:53 crc kubenswrapper[4869]: E0130 11:09:53.962650 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc 
error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" podUID="50843920-ef36-4230-8840-0d34b70f602b" Jan 30 11:09:54 crc kubenswrapper[4869]: E0130 11:09:54.470571 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" podUID="50843920-ef36-4230-8840-0d34b70f602b" Jan 30 11:09:54 crc kubenswrapper[4869]: E0130 11:09:54.793972 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488" Jan 30 11:09:54 crc kubenswrapper[4869]: E0130 11:09:54.794185 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5qkf5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5b964cf4cd-tk8kg_openstack-operators(61332cae-942e-475a-85b9-2020908d8266): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Jan 30 11:09:54 crc kubenswrapper[4869]: E0130 11:09:54.795345 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" podUID="61332cae-942e-475a-85b9-2020908d8266" Jan 30 11:09:55 crc kubenswrapper[4869]: E0130 11:09:55.485494 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" podUID="61332cae-942e-475a-85b9-2020908d8266" Jan 30 11:09:56 crc kubenswrapper[4869]: E0130 11:09:56.608530 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/manila-operator@sha256:5e1f9d03f34fb9704b759d9c55c9b35235aa5103644a902f2e553499c8d64c2d" Jan 30 11:09:56 crc kubenswrapper[4869]: E0130 11:09:56.608731 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/manila-operator@sha256:5e1f9d03f34fb9704b759d9c55c9b35235aa5103644a902f2e553499c8d64c2d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mwg99,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
manila-operator-controller-manager-74954f9f78-flsxj_openstack-operators(0a3ad98c-dec5-417c-890a-227fcab3d149): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 11:09:56 crc kubenswrapper[4869]: E0130 11:09:56.610702 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" podUID="0a3ad98c-dec5-417c-890a-227fcab3d149" Jan 30 11:09:57 crc kubenswrapper[4869]: E0130 11:09:57.497702 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/manila-operator@sha256:5e1f9d03f34fb9704b759d9c55c9b35235aa5103644a902f2e553499c8d64c2d\\\"\"" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" podUID="0a3ad98c-dec5-417c-890a-227fcab3d149" Jan 30 11:09:58 crc kubenswrapper[4869]: E0130 11:09:58.382494 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Jan 30 11:09:58 crc kubenswrapper[4869]: E0130 11:09:58.382680 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-btc29,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-bxjxm_openstack-operators(1361000f-8ad7-4e93-b7cc-c059e5ba6641): ErrImagePull: rpc error: code = Canceled desc = 
copying config: context canceled" logger="UnhandledError" Jan 30 11:09:58 crc kubenswrapper[4869]: E0130 11:09:58.383987 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm" podUID="1361000f-8ad7-4e93-b7cc-c059e5ba6641" Jan 30 11:09:58 crc kubenswrapper[4869]: E0130 11:09:58.504227 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm" podUID="1361000f-8ad7-4e93-b7cc-c059e5ba6641" Jan 30 11:09:59 crc kubenswrapper[4869]: I0130 11:09:59.728616 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-shvsz"] Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.524346 4869 generic.go:334] "Generic (PLEG): container finished" podID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerID="b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3" exitCode=0 Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.524416 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-shvsz" event={"ID":"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6","Type":"ContainerDied","Data":"b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.524465 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-shvsz" event={"ID":"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6","Type":"ContainerStarted","Data":"6ed63c0972221c9c82cef4d9bca716c3222c591cf14a5ad3675194f8340d942b"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.527929 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" event={"ID":"22150fc4-3e93-45fd-9301-f7b552f57f48","Type":"ContainerStarted","Data":"e75eae6ea24fecee0bcecdab7ed0825ad7a8ad5e6bc57588d10e7e0cfb867c7c"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.528254 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.532295 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" event={"ID":"fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29","Type":"ContainerStarted","Data":"7af32f00dae6b79ffd4084cbccb40d9108ebb92d1361a2bb6f8197f8e4be0196"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.532506 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.541789 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" event={"ID":"757e291c-f7c9-4b61-9ed8-5e78c4ffe989","Type":"ContainerStarted","Data":"4ab5444b732a3f6de7ebe17079428bdba63df0ef107f80d162067272a14b27ca"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.542271 4869 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.565965 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" event={"ID":"79e9a1e6-68d5-422a-9446-0d4f106f5f22","Type":"ContainerStarted","Data":"2c6069a1da37e0dbb36135b605d6cbf873a51c75459a52fd994695c0c9cc42e2"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.566655 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.590906 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" event={"ID":"64399a12-4f5e-42e8-bc4b-b80347287a30","Type":"ContainerStarted","Data":"a150cf3a74976ea046684679cba500121a52f8be3a4a2242ae7b314673ef1562"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.591675 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.614532 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" event={"ID":"818c8e4f-f344-4544-a562-35fd8865bdb9","Type":"ContainerStarted","Data":"8371155a0f7f93ad1b5d92303bee6885d9ccaeed1deb7016ed38cf924e584743"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.616067 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.637797 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" event={"ID":"4eb7653e-3fa6-4479-a1de-66d77e70b60c","Type":"ContainerStarted","Data":"97b344abadcae2a5d849ca9438329ffe7ddea0b1a0fe0200a580b429b6cf5bc1"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.638314 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.667033 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" event={"ID":"ab8e3667-4a4c-47df-b46c-1d3d5a315fe0","Type":"ContainerStarted","Data":"456d478a073e932a2a4baf236865492c2ce5a705b651a9ab151fd71e9d5f8d7c"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.667202 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.678652 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" event={"ID":"495b2a0c-3cb3-4a22-8609-eb786d6c693f","Type":"ContainerStarted","Data":"820c584b9f354f16adf20e144a6a52904cc2a54aaeb2cf91bc9a9a84f6e9a623"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.678800 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.683873 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" event={"ID":"f0d6cb91-dba9-4395-9438-8ab72ea16207","Type":"ContainerStarted","Data":"200bf2858ca26ede4dd35f8cf61a7086fc0ff6c54ef6b38d5ae9666e73a8b918"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.684077 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.703989 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" event={"ID":"88221c24-f744-4a85-9f3e-cede7b0a4f67","Type":"ContainerStarted","Data":"28e0b3cb385041add064717a2227295b67d3b56a6983b895b065c8596ea486ea"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.704418 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.724513 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" event={"ID":"e9e042ed-4c1e-430b-bb7e-fda28cbef607","Type":"ContainerStarted","Data":"69147475836c62aafc84ab393a6031db86eb60f71818d0050f2f49ba05b6e030"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.725302 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.736115 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst" event={"ID":"6e096fd6-f27e-4561-a86b-8991d11d82e7","Type":"ContainerStarted","Data":"072801c0d499c03ed0a80b9ae30e0c499b75c5b8e89b829a6508a2fd2f5d6bb4"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.737036 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.748137 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" event={"ID":"484fb11b-1e2c-40c8-944d-a34a6fbaed79","Type":"ContainerStarted","Data":"7ebefc42e6bc8879f66028200135c5312a8804c8b67e00ab5a3c549c24a8bea0"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.753790 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" event={"ID":"68a4d7f9-c03f-4552-8571-344434546d04","Type":"ContainerStarted","Data":"b506773978b85a0a9a8f37e24178d36c0cd7f6510e591c338fdc6c884170d7af"} Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.754049 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.760288 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" podStartSLOduration=4.488878493 podStartE2EDuration="28.760272455s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.992428616 +0000 UTC m=+925.542304672" lastFinishedPulling="2026-01-30 11:09:59.263822568 +0000 UTC 
m=+949.813698634" observedRunningTime="2026-01-30 11:10:00.75693236 +0000 UTC m=+951.306808436" watchObservedRunningTime="2026-01-30 11:10:00.760272455 +0000 UTC m=+951.310148521" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.893491 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" podStartSLOduration=4.145843766 podStartE2EDuration="28.893448748s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.51705139 +0000 UTC m=+925.066927456" lastFinishedPulling="2026-01-30 11:09:59.264656372 +0000 UTC m=+949.814532438" observedRunningTime="2026-01-30 11:10:00.884982748 +0000 UTC m=+951.434858814" watchObservedRunningTime="2026-01-30 11:10:00.893448748 +0000 UTC m=+951.443324964" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.942299 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" podStartSLOduration=4.094324904 podStartE2EDuration="27.942265315s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.416750362 +0000 UTC m=+925.966626428" lastFinishedPulling="2026-01-30 11:09:59.264690773 +0000 UTC m=+949.814566839" observedRunningTime="2026-01-30 11:10:00.934595427 +0000 UTC m=+951.484471493" watchObservedRunningTime="2026-01-30 11:10:00.942265315 +0000 UTC m=+951.492141381" Jan 30 11:10:00 crc kubenswrapper[4869]: I0130 11:10:00.977318 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" podStartSLOduration=4.114268031 podStartE2EDuration="27.97727537s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.4589319 +0000 UTC m=+926.008807966" lastFinishedPulling="2026-01-30 11:09:59.321939239 +0000 UTC m=+949.871815305" observedRunningTime="2026-01-30 11:10:00.960425611 +0000 UTC m=+951.510301687" watchObservedRunningTime="2026-01-30 11:10:00.97727537 +0000 UTC m=+951.527151436" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.010408 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" podStartSLOduration=4.138804389 podStartE2EDuration="28.010387371s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.525464241 +0000 UTC m=+926.075340307" lastFinishedPulling="2026-01-30 11:09:59.397047213 +0000 UTC m=+949.946923289" observedRunningTime="2026-01-30 11:10:01.008339363 +0000 UTC m=+951.558215429" watchObservedRunningTime="2026-01-30 11:10:01.010387371 +0000 UTC m=+951.560263437" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.049142 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" podStartSLOduration=4.216750093 podStartE2EDuration="28.049107361s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.508613252 +0000 UTC m=+926.058489328" lastFinishedPulling="2026-01-30 11:09:59.34097053 +0000 UTC m=+949.890846596" observedRunningTime="2026-01-30 11:10:01.042564725 +0000 UTC m=+951.592440791" watchObservedRunningTime="2026-01-30 11:10:01.049107361 +0000 UTC m=+951.598983427" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.079371 4869 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" podStartSLOduration=6.060783093 podStartE2EDuration="29.07934497s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.934347226 +0000 UTC m=+925.484223292" lastFinishedPulling="2026-01-30 11:09:57.952909103 +0000 UTC m=+948.502785169" observedRunningTime="2026-01-30 11:10:01.075656565 +0000 UTC m=+951.625532631" watchObservedRunningTime="2026-01-30 11:10:01.07934497 +0000 UTC m=+951.629221036" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.097836 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" podStartSLOduration=4.294214134 podStartE2EDuration="28.097809225s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.542038971 +0000 UTC m=+926.091915037" lastFinishedPulling="2026-01-30 11:09:59.345634062 +0000 UTC m=+949.895510128" observedRunningTime="2026-01-30 11:10:01.096325883 +0000 UTC m=+951.646201939" watchObservedRunningTime="2026-01-30 11:10:01.097809225 +0000 UTC m=+951.647685291" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.125994 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" podStartSLOduration=4.278689444 podStartE2EDuration="28.125956625s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.516209718 +0000 UTC m=+926.066085784" lastFinishedPulling="2026-01-30 11:09:59.363476899 +0000 UTC m=+949.913352965" observedRunningTime="2026-01-30 11:10:01.121780176 +0000 UTC m=+951.671656242" watchObservedRunningTime="2026-01-30 11:10:01.125956625 +0000 UTC m=+951.675832691" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.144627 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" podStartSLOduration=4.794195397 podStartE2EDuration="29.144595704s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.914147802 +0000 UTC m=+925.464023868" lastFinishedPulling="2026-01-30 11:09:59.264548109 +0000 UTC m=+949.814424175" observedRunningTime="2026-01-30 11:10:01.141691822 +0000 UTC m=+951.691567918" watchObservedRunningTime="2026-01-30 11:10:01.144595704 +0000 UTC m=+951.694471770" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.169796 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst" podStartSLOduration=4.405431064 podStartE2EDuration="28.16977682s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.500346907 +0000 UTC m=+926.050222973" lastFinishedPulling="2026-01-30 11:09:59.264692663 +0000 UTC m=+949.814568729" observedRunningTime="2026-01-30 11:10:01.167975058 +0000 UTC m=+951.717851124" watchObservedRunningTime="2026-01-30 11:10:01.16977682 +0000 UTC m=+951.719652886" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.208672 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" podStartSLOduration=5.299615519 podStartE2EDuration="28.208648914s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" 
firstStartedPulling="2026-01-30 11:09:35.457449468 +0000 UTC m=+926.007325534" lastFinishedPulling="2026-01-30 11:09:58.366482863 +0000 UTC m=+948.916358929" observedRunningTime="2026-01-30 11:10:01.207932103 +0000 UTC m=+951.757808179" watchObservedRunningTime="2026-01-30 11:10:01.208648914 +0000 UTC m=+951.758524980" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.249470 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" podStartSLOduration=4.639973206 podStartE2EDuration="29.249447613s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.65432475 +0000 UTC m=+925.204200816" lastFinishedPulling="2026-01-30 11:09:59.263799157 +0000 UTC m=+949.813675223" observedRunningTime="2026-01-30 11:10:01.241795896 +0000 UTC m=+951.791671962" watchObservedRunningTime="2026-01-30 11:10:01.249447613 +0000 UTC m=+951.799323679" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.278758 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" podStartSLOduration=6.22328129 podStartE2EDuration="29.278740045s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.184703568 +0000 UTC m=+924.734579634" lastFinishedPulling="2026-01-30 11:09:57.240162323 +0000 UTC m=+947.790038389" observedRunningTime="2026-01-30 11:10:01.27399392 +0000 UTC m=+951.823869986" watchObservedRunningTime="2026-01-30 11:10:01.278740045 +0000 UTC m=+951.828616111" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.315980 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" podStartSLOduration=4.9660854709999995 podStartE2EDuration="29.315964763s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.913904715 +0000 UTC m=+925.463780781" lastFinishedPulling="2026-01-30 11:09:59.263784007 +0000 UTC m=+949.813660073" observedRunningTime="2026-01-30 11:10:01.311843746 +0000 UTC m=+951.861719812" watchObservedRunningTime="2026-01-30 11:10:01.315964763 +0000 UTC m=+951.865840829" Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.770349 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-shvsz" event={"ID":"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6","Type":"ContainerStarted","Data":"26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926"} Jan 30 11:10:01 crc kubenswrapper[4869]: I0130 11:10:01.770745 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" Jan 30 11:10:02 crc kubenswrapper[4869]: I0130 11:10:02.775856 4869 generic.go:334] "Generic (PLEG): container finished" podID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerID="26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926" exitCode=0 Jan 30 11:10:02 crc kubenswrapper[4869]: I0130 11:10:02.775910 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-shvsz" event={"ID":"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6","Type":"ContainerDied","Data":"26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926"} Jan 30 11:10:03 crc kubenswrapper[4869]: I0130 11:10:03.787618 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-shvsz" event={"ID":"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6","Type":"ContainerStarted","Data":"26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4"} Jan 30 11:10:03 crc kubenswrapper[4869]: I0130 11:10:03.819541 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-shvsz" podStartSLOduration=22.14726478 podStartE2EDuration="24.819524662s" podCreationTimestamp="2026-01-30 11:09:39 +0000 UTC" firstStartedPulling="2026-01-30 11:10:00.526148803 +0000 UTC m=+951.076024869" lastFinishedPulling="2026-01-30 11:10:03.198408695 +0000 UTC m=+953.748284751" observedRunningTime="2026-01-30 11:10:03.816158456 +0000 UTC m=+954.366034522" watchObservedRunningTime="2026-01-30 11:10:03.819524662 +0000 UTC m=+954.369400718" Jan 30 11:10:04 crc kubenswrapper[4869]: I0130 11:10:04.221588 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cd99594-rjwgj" Jan 30 11:10:04 crc kubenswrapper[4869]: I0130 11:10:04.328571 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-28mvf" Jan 30 11:10:04 crc kubenswrapper[4869]: I0130 11:10:04.353745 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-7d4f9d9c9b-8hsst" Jan 30 11:10:04 crc kubenswrapper[4869]: I0130 11:10:04.582525 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5bf648c946-89m4w" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.107252 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.112568 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11587507-3c83-42d5-af04-3e352e7c7689-cert\") pod \"infra-operator-controller-manager-79955696d6-ghcvm\" (UID: \"11587507-3c83-42d5-af04-3e352e7c7689\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.218783 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.614494 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.620167 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28c859ed-db94-494c-afa3-c1cb96425ac5-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j\" (UID: \"28c859ed-db94-494c-afa3-c1cb96425ac5\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.715386 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm"] Jan 30 11:10:05 crc kubenswrapper[4869]: W0130 11:10:05.721318 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11587507_3c83_42d5_af04_3e352e7c7689.slice/crio-49b0052b5d7959dea1efc08bba6237aedbc51a4dbe9a73a958ba6a4d8049fe67 WatchSource:0}: Error finding container 49b0052b5d7959dea1efc08bba6237aedbc51a4dbe9a73a958ba6a4d8049fe67: Status 404 returned error can't find the container with id 49b0052b5d7959dea1efc08bba6237aedbc51a4dbe9a73a958ba6a4d8049fe67 Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.759925 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.804649 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" event={"ID":"11587507-3c83-42d5-af04-3e352e7c7689","Type":"ContainerStarted","Data":"49b0052b5d7959dea1efc08bba6237aedbc51a4dbe9a73a958ba6a4d8049fe67"} Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.919033 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.919365 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.922695 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-metrics-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:10:05 crc kubenswrapper[4869]: I0130 11:10:05.922746 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5597fb4-0c80-4868-ae07-b38449e7a4af-webhook-certs\") pod \"openstack-operator-controller-manager-5f57b98d7d-srxcx\" (UID: \"e5597fb4-0c80-4868-ae07-b38449e7a4af\") " pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:10:06 crc kubenswrapper[4869]: I0130 11:10:06.139224 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:10:06 crc kubenswrapper[4869]: I0130 11:10:06.267661 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j"] Jan 30 11:10:06 crc kubenswrapper[4869]: W0130 11:10:06.277956 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c859ed_db94_494c_afa3_c1cb96425ac5.slice/crio-dc28489f11942e4c1abb393242de7007a4b085a4234e8c341ecaf6b56cbd0e91 WatchSource:0}: Error finding container dc28489f11942e4c1abb393242de7007a4b085a4234e8c341ecaf6b56cbd0e91: Status 404 returned error can't find the container with id dc28489f11942e4c1abb393242de7007a4b085a4234e8c341ecaf6b56cbd0e91 Jan 30 11:10:06 crc kubenswrapper[4869]: I0130 11:10:06.600293 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx"] Jan 30 11:10:06 crc kubenswrapper[4869]: I0130 11:10:06.816931 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" event={"ID":"e5597fb4-0c80-4868-ae07-b38449e7a4af","Type":"ContainerStarted","Data":"51a0b2fabd2a4f858f80304891637910a42bc7dd865173d9d64173d67ab33bd1"} Jan 30 11:10:06 crc kubenswrapper[4869]: I0130 11:10:06.818464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" event={"ID":"28c859ed-db94-494c-afa3-c1cb96425ac5","Type":"ContainerStarted","Data":"dc28489f11942e4c1abb393242de7007a4b085a4234e8c341ecaf6b56cbd0e91"} Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.860032 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" event={"ID":"e5597fb4-0c80-4868-ae07-b38449e7a4af","Type":"ContainerStarted","Data":"bc483e4d0bf4583ab242926a4ba19b38cc43c014829b3bba1f72e4aa5ba7c780"} Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.861404 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.876091 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" event={"ID":"50843920-ef36-4230-8840-0d34b70f602b","Type":"ContainerStarted","Data":"882f07afbce568fbcc88030f7fd007297ddabb51b7c09e6b9fd601967a01838d"} Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.876386 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.878619 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" event={"ID":"edcec497-d8a5-4cc4-b966-90bda3727925","Type":"ContainerStarted","Data":"f2fc9be6d5eed4d809046a7aa061959cdaafb7a76055cd63957a47f496bbe52c"} Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.878889 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.908345 4869 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" podStartSLOduration=34.90832191 podStartE2EDuration="34.90832191s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:10:07.901860867 +0000 UTC m=+958.451736943" watchObservedRunningTime="2026-01-30 11:10:07.90832191 +0000 UTC m=+958.458197986" Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.916704 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" podStartSLOduration=3.735935652 podStartE2EDuration="34.916686678s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.458771786 +0000 UTC m=+926.008647852" lastFinishedPulling="2026-01-30 11:10:06.639522812 +0000 UTC m=+957.189398878" observedRunningTime="2026-01-30 11:10:07.914828405 +0000 UTC m=+958.464704471" watchObservedRunningTime="2026-01-30 11:10:07.916686678 +0000 UTC m=+958.466562744" Jan 30 11:10:07 crc kubenswrapper[4869]: I0130 11:10:07.935181 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" podStartSLOduration=4.334684492 podStartE2EDuration="35.935160483s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:09:34.983649067 +0000 UTC m=+925.533525133" lastFinishedPulling="2026-01-30 11:10:06.584125058 +0000 UTC m=+957.134001124" observedRunningTime="2026-01-30 11:10:07.929995526 +0000 UTC m=+958.479871602" watchObservedRunningTime="2026-01-30 11:10:07.935160483 +0000 UTC m=+958.485036549" Jan 30 11:10:09 crc kubenswrapper[4869]: I0130 11:10:09.704448 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:10:09 crc kubenswrapper[4869]: I0130 11:10:09.704515 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:10:09 crc kubenswrapper[4869]: I0130 11:10:09.747893 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:10:09 crc kubenswrapper[4869]: I0130 11:10:09.927366 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.558970 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-shvsz"] Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.898122 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" event={"ID":"11587507-3c83-42d5-af04-3e352e7c7689","Type":"ContainerStarted","Data":"50b3ad6d9d6b0bab8709b10fafb80c4ddb96df5f92360c1ebdac4c0ea4719c1f"} Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.898476 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.899719 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" 
event={"ID":"28c859ed-db94-494c-afa3-c1cb96425ac5","Type":"ContainerStarted","Data":"9a99c2a244ef5025ad8c43a0b692cefd90c170cec0290ec70f0b2184fd92fffe"} Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.899855 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.901353 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" event={"ID":"0a3ad98c-dec5-417c-890a-227fcab3d149","Type":"ContainerStarted","Data":"02276f040804dec4f28d725219841cf04e3379f58e85663eb41ea617acfb646d"} Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.901533 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.902756 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" event={"ID":"61332cae-942e-475a-85b9-2020908d8266","Type":"ContainerStarted","Data":"647a05f21e95a0570c82950526ac69e523eee310bab34f9dcfe01a6bc7950cb1"} Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.917841 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" podStartSLOduration=34.102577117 podStartE2EDuration="38.917823854s" podCreationTimestamp="2026-01-30 11:09:32 +0000 UTC" firstStartedPulling="2026-01-30 11:10:05.72327618 +0000 UTC m=+956.273152246" lastFinishedPulling="2026-01-30 11:10:10.538522917 +0000 UTC m=+961.088398983" observedRunningTime="2026-01-30 11:10:10.915876018 +0000 UTC m=+961.465752094" watchObservedRunningTime="2026-01-30 11:10:10.917823854 +0000 UTC m=+961.467699940" Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.934994 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" podStartSLOduration=2.8692468780000002 podStartE2EDuration="37.934977941s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.473570876 +0000 UTC m=+926.023446942" lastFinishedPulling="2026-01-30 11:10:10.539301939 +0000 UTC m=+961.089178005" observedRunningTime="2026-01-30 11:10:10.93281119 +0000 UTC m=+961.482687276" watchObservedRunningTime="2026-01-30 11:10:10.934977941 +0000 UTC m=+961.484854007" Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.958516 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" podStartSLOduration=33.699948348 podStartE2EDuration="37.958500809s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:10:06.280773289 +0000 UTC m=+956.830649355" lastFinishedPulling="2026-01-30 11:10:10.53932575 +0000 UTC m=+961.089201816" observedRunningTime="2026-01-30 11:10:10.955851694 +0000 UTC m=+961.505727760" watchObservedRunningTime="2026-01-30 11:10:10.958500809 +0000 UTC m=+961.508376875" Jan 30 11:10:10 crc kubenswrapper[4869]: I0130 11:10:10.974945 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" podStartSLOduration=2.9370277529999997 
podStartE2EDuration="37.974926316s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.501761457 +0000 UTC m=+926.051637523" lastFinishedPulling="2026-01-30 11:10:10.53966002 +0000 UTC m=+961.089536086" observedRunningTime="2026-01-30 11:10:10.972822466 +0000 UTC m=+961.522698532" watchObservedRunningTime="2026-01-30 11:10:10.974926316 +0000 UTC m=+961.524802382" Jan 30 11:10:11 crc kubenswrapper[4869]: I0130 11:10:11.909014 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-shvsz" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="registry-server" containerID="cri-o://26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4" gracePeriod=2 Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.331213 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.414246 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-catalog-content\") pod \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.414335 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-utilities\") pod \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.414378 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmvv6\" (UniqueName: \"kubernetes.io/projected/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-kube-api-access-dmvv6\") pod \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\" (UID: \"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6\") " Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.415181 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-utilities" (OuterVolumeSpecName: "utilities") pod "bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" (UID: "bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.430664 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-kube-api-access-dmvv6" (OuterVolumeSpecName: "kube-api-access-dmvv6") pod "bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" (UID: "bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6"). InnerVolumeSpecName "kube-api-access-dmvv6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.516249 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.516298 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmvv6\" (UniqueName: \"kubernetes.io/projected/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-kube-api-access-dmvv6\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.932548 4869 generic.go:334] "Generic (PLEG): container finished" podID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerID="26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4" exitCode=0 Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.932595 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-shvsz" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.932611 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-shvsz" event={"ID":"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6","Type":"ContainerDied","Data":"26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4"} Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.933282 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-shvsz" event={"ID":"bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6","Type":"ContainerDied","Data":"6ed63c0972221c9c82cef4d9bca716c3222c591cf14a5ad3675194f8340d942b"} Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.933308 4869 scope.go:117] "RemoveContainer" containerID="26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.952137 4869 scope.go:117] "RemoveContainer" containerID="26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.970912 4869 scope.go:117] "RemoveContainer" containerID="b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3" Jan 30 11:10:12 crc kubenswrapper[4869]: I0130 11:10:12.999203 4869 scope.go:117] "RemoveContainer" containerID="26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4" Jan 30 11:10:13 crc kubenswrapper[4869]: E0130 11:10:13.000009 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4\": container with ID starting with 26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4 not found: ID does not exist" containerID="26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.000040 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4"} err="failed to get container status \"26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4\": rpc error: code = NotFound desc = could not find container \"26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4\": container with ID starting with 26b8433bb11574a5cd7a341b5d33af785e966c4c4ea0ce653fd547bebd34e1c4 not found: ID does not exist" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.000060 4869 scope.go:117] 
"RemoveContainer" containerID="26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926" Jan 30 11:10:13 crc kubenswrapper[4869]: E0130 11:10:13.000839 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926\": container with ID starting with 26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926 not found: ID does not exist" containerID="26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.000896 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926"} err="failed to get container status \"26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926\": rpc error: code = NotFound desc = could not find container \"26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926\": container with ID starting with 26ab27b9cde0c925e5996a91059a227e165ee2ed8d53807e6abb06c5b8037926 not found: ID does not exist" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.000929 4869 scope.go:117] "RemoveContainer" containerID="b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3" Jan 30 11:10:13 crc kubenswrapper[4869]: E0130 11:10:13.001291 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3\": container with ID starting with b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3 not found: ID does not exist" containerID="b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.001323 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3"} err="failed to get container status \"b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3\": rpc error: code = NotFound desc = could not find container \"b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3\": container with ID starting with b7f72cb4b79e0ea4fc3e772ba22abcdcbaee3d671f59a53a075f727570b5c1e3 not found: ID does not exist" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.150754 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" (UID: "bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.196842 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-566c8844c5-llwtw" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.219755 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-5f9bbdc844-56ngn" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.228209 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.278661 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-shvsz"] Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.283859 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-wgddf" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.284249 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-784f59d4f4-hgkkg" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.297938 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-shvsz"] Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.336868 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-54985f5875-6m8mq" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.353307 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-q2f9n" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.398952 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6fd9bbb6f6-ql2pg" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.477937 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-6c9d56f9bd-n5pbt" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.615679 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6cfc4f6754-gcvf8" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.649260 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-22kgf" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.702737 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-67f5956bc9-vs89l" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.851039 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-694c6dcf95-m4d9d" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.881220 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lvrcp" Jan 30 11:10:13 crc kubenswrapper[4869]: I0130 11:10:13.932350 4869 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" Jan 30 11:10:14 crc kubenswrapper[4869]: I0130 11:10:14.140931 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" path="/var/lib/kubelet/pods/bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6/volumes" Jan 30 11:10:15 crc kubenswrapper[4869]: I0130 11:10:15.224552 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-ghcvm" Jan 30 11:10:15 crc kubenswrapper[4869]: I0130 11:10:15.767102 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j" Jan 30 11:10:16 crc kubenswrapper[4869]: I0130 11:10:16.148102 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-5f57b98d7d-srxcx" Jan 30 11:10:23 crc kubenswrapper[4869]: I0130 11:10:23.000702 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm" event={"ID":"1361000f-8ad7-4e93-b7cc-c059e5ba6641","Type":"ContainerStarted","Data":"505a405f471de133abf95b137e11ff6be106640e02ae9f563639de9dce739ac0"} Jan 30 11:10:23 crc kubenswrapper[4869]: I0130 11:10:23.021387 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-bxjxm" podStartSLOduration=3.005711804 podStartE2EDuration="50.02136756s" podCreationTimestamp="2026-01-30 11:09:33 +0000 UTC" firstStartedPulling="2026-01-30 11:09:35.521549629 +0000 UTC m=+926.071425685" lastFinishedPulling="2026-01-30 11:10:22.537205375 +0000 UTC m=+973.087081441" observedRunningTime="2026-01-30 11:10:23.016207503 +0000 UTC m=+973.566083589" watchObservedRunningTime="2026-01-30 11:10:23.02136756 +0000 UTC m=+973.571243626" Jan 30 11:10:23 crc kubenswrapper[4869]: I0130 11:10:23.839037 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-74954f9f78-flsxj" Jan 30 11:10:23 crc kubenswrapper[4869]: I0130 11:10:23.935449 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-tk8kg" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.058363 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pfzzn"] Jan 30 11:10:38 crc kubenswrapper[4869]: E0130 11:10:38.060966 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="extract-content" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.060984 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="extract-content" Jan 30 11:10:38 crc kubenswrapper[4869]: E0130 11:10:38.061008 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="registry-server" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.061014 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="registry-server" Jan 30 11:10:38 crc kubenswrapper[4869]: E0130 11:10:38.061024 4869 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="extract-utilities" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.061031 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="extract-utilities" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.061153 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb3205a4-bcd3-43d5-bd46-fe83ee6acfb6" containerName="registry-server" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.062027 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.065316 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.065519 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.067371 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.067478 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-hbm2z" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.068661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb0033f-9a86-49d8-8508-37742393341e-config\") pod \"dnsmasq-dns-675f4bcbfc-pfzzn\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.068793 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdgkt\" (UniqueName: \"kubernetes.io/projected/beb0033f-9a86-49d8-8508-37742393341e-kube-api-access-pdgkt\") pod \"dnsmasq-dns-675f4bcbfc-pfzzn\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.073862 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pfzzn"] Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.115787 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dcv8s"] Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.117194 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.120316 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.145051 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dcv8s"] Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.170677 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb0033f-9a86-49d8-8508-37742393341e-config\") pod \"dnsmasq-dns-675f4bcbfc-pfzzn\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.170982 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdgkt\" (UniqueName: \"kubernetes.io/projected/beb0033f-9a86-49d8-8508-37742393341e-kube-api-access-pdgkt\") pod \"dnsmasq-dns-675f4bcbfc-pfzzn\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.171549 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb0033f-9a86-49d8-8508-37742393341e-config\") pod \"dnsmasq-dns-675f4bcbfc-pfzzn\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.194973 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdgkt\" (UniqueName: \"kubernetes.io/projected/beb0033f-9a86-49d8-8508-37742393341e-kube-api-access-pdgkt\") pod \"dnsmasq-dns-675f4bcbfc-pfzzn\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.272519 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-config\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.272635 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.272765 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrz8n\" (UniqueName: \"kubernetes.io/projected/17f6b0df-4fba-4069-a10e-658776dc40bb-kube-api-access-hrz8n\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.373909 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrz8n\" (UniqueName: \"kubernetes.io/projected/17f6b0df-4fba-4069-a10e-658776dc40bb-kube-api-access-hrz8n\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc 
kubenswrapper[4869]: I0130 11:10:38.374048 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-config\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.374075 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.374966 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.375175 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-config\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.381327 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.390063 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrz8n\" (UniqueName: \"kubernetes.io/projected/17f6b0df-4fba-4069-a10e-658776dc40bb-kube-api-access-hrz8n\") pod \"dnsmasq-dns-78dd6ddcc-dcv8s\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.431158 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.818198 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pfzzn"] Jan 30 11:10:38 crc kubenswrapper[4869]: I0130 11:10:38.889579 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dcv8s"] Jan 30 11:10:39 crc kubenswrapper[4869]: I0130 11:10:39.112329 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" event={"ID":"17f6b0df-4fba-4069-a10e-658776dc40bb","Type":"ContainerStarted","Data":"d4f401f3cb1fd47cb0bb1bf47a64cb1b889e727f9fc2b0b40f42713d4a032d2d"} Jan 30 11:10:39 crc kubenswrapper[4869]: I0130 11:10:39.113891 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" event={"ID":"beb0033f-9a86-49d8-8508-37742393341e","Type":"ContainerStarted","Data":"54e3047176f8ead575a0735c8d75dafb089fe27b6353833c0992d9cd4492a0fc"} Jan 30 11:10:40 crc kubenswrapper[4869]: I0130 11:10:40.900937 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pfzzn"] Jan 30 11:10:40 crc kubenswrapper[4869]: I0130 11:10:40.936881 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-dn9dv"] Jan 30 11:10:40 crc kubenswrapper[4869]: I0130 11:10:40.943645 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:40 crc kubenswrapper[4869]: I0130 11:10:40.975154 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-dn9dv"] Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.128447 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-config\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.128559 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.128636 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-258rh\" (UniqueName: \"kubernetes.io/projected/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-kube-api-access-258rh\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.185033 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dcv8s"] Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.214905 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-ckn8s"] Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.216133 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.230808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-258rh\" (UniqueName: \"kubernetes.io/projected/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-kube-api-access-258rh\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.230903 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-config\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.230944 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.232023 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.232890 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-config\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.236992 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-ckn8s"] Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.260956 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-258rh\" (UniqueName: \"kubernetes.io/projected/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-kube-api-access-258rh\") pod \"dnsmasq-dns-666b6646f7-dn9dv\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.281575 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.332079 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-config\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.332185 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljjx8\" (UniqueName: \"kubernetes.io/projected/88fa474b-ed48-4ca3-af15-7217e4a9a6df-kube-api-access-ljjx8\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.332217 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.432975 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljjx8\" (UniqueName: \"kubernetes.io/projected/88fa474b-ed48-4ca3-af15-7217e4a9a6df-kube-api-access-ljjx8\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.433029 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.433096 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-config\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.434328 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-config\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.434373 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.455010 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljjx8\" (UniqueName: \"kubernetes.io/projected/88fa474b-ed48-4ca3-af15-7217e4a9a6df-kube-api-access-ljjx8\") pod \"dnsmasq-dns-57d769cc4f-ckn8s\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:41 crc kubenswrapper[4869]: I0130 11:10:41.548488 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.001270 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-dn9dv"] Jan 30 11:10:42 crc kubenswrapper[4869]: W0130 11:10:42.006762 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d5199a1_1aaa_473c_ab15_80b0fc26f92f.slice/crio-fd938a97381568b714913f416557e9c981d40415c096fa746cc07cc047d75276 WatchSource:0}: Error finding container fd938a97381568b714913f416557e9c981d40415c096fa746cc07cc047d75276: Status 404 returned error can't find the container with id fd938a97381568b714913f416557e9c981d40415c096fa746cc07cc047d75276 Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.060788 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.062486 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.065164 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-r8qz2" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.065442 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.065587 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.066353 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.066503 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.067435 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.068433 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.091679 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-ckn8s"] Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.101826 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 11:10:42 crc kubenswrapper[4869]: W0130 11:10:42.106257 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88fa474b_ed48_4ca3_af15_7217e4a9a6df.slice/crio-9251236df24e7a1f9712353dbebd6c495a4a29c874a1232bb777886da462c40c WatchSource:0}: Error finding container 9251236df24e7a1f9712353dbebd6c495a4a29c874a1232bb777886da462c40c: Status 404 returned error can't find the container with id 9251236df24e7a1f9712353dbebd6c495a4a29c874a1232bb777886da462c40c Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.157057 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" 
event={"ID":"88fa474b-ed48-4ca3-af15-7217e4a9a6df","Type":"ContainerStarted","Data":"9251236df24e7a1f9712353dbebd6c495a4a29c874a1232bb777886da462c40c"} Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.158625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" event={"ID":"0d5199a1-1aaa-473c-ab15-80b0fc26f92f","Type":"ContainerStarted","Data":"fd938a97381568b714913f416557e9c981d40415c096fa746cc07cc047d75276"} Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259163 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259218 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259242 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259375 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259455 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d1e4183-a136-428f-9bd8-e857a603da8f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259486 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64b5r\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-kube-api-access-64b5r\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259538 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259564 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 
11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259874 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.259979 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.260022 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4d1e4183-a136-428f-9bd8-e857a603da8f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.362852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d1e4183-a136-428f-9bd8-e857a603da8f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364080 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64b5r\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-kube-api-access-64b5r\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364110 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364128 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364194 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364239 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364264 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/4d1e4183-a136-428f-9bd8-e857a603da8f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364309 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364344 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364372 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.364400 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.365528 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.368006 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.368244 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.368825 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.369475 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d1e4183-a136-428f-9bd8-e857a603da8f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.370691 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.370924 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.371334 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4d1e4183-a136-428f-9bd8-e857a603da8f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.377969 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.384123 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.394090 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64b5r\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-kube-api-access-64b5r\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.403768 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.418376 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.420045 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.426443 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.426667 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.426798 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.426927 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.427037 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.427133 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.427278 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-nx5rm" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.434195 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.571786 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15b1a123-3831-4fa6-bc52-3f0cf30953f9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.571841 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.571947 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.571992 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.572013 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svbhm\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-kube-api-access-svbhm\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.572123 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.572165 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.572207 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15b1a123-3831-4fa6-bc52-3f0cf30953f9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.572313 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.572400 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.572438 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673729 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15b1a123-3831-4fa6-bc52-3f0cf30953f9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673794 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673841 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673879 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673898 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svbhm\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-kube-api-access-svbhm\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673926 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673947 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.673968 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15b1a123-3831-4fa6-bc52-3f0cf30953f9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.674187 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.674216 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.674233 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.674516 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.674900 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.675553 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.675606 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.681147 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.681878 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.683375 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15b1a123-3831-4fa6-bc52-3f0cf30953f9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.684449 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.710592 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.712007 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.726303 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15b1a123-3831-4fa6-bc52-3f0cf30953f9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.747666 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svbhm\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-kube-api-access-svbhm\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.777101 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:42 crc kubenswrapper[4869]: I0130 11:10:42.784662 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.633258 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.634809 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.637237 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.637608 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.637681 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.638006 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-nmg6z" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.640211 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.649386 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791532 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-operator-scripts\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791581 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-kolla-config\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791616 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791654 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsdtz\" (UniqueName: \"kubernetes.io/projected/34532f6a-b213-422d-8126-d74d95c32497-kube-api-access-xsdtz\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791687 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-config-data-default\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791728 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34532f6a-b213-422d-8126-d74d95c32497-config-data-generated\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791766 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.791808 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893366 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-config-data-default\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893416 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34532f6a-b213-422d-8126-d74d95c32497-config-data-generated\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893462 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893528 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893548 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-operator-scripts\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893566 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-kolla-config\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893593 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.893624 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsdtz\" (UniqueName: \"kubernetes.io/projected/34532f6a-b213-422d-8126-d74d95c32497-kube-api-access-xsdtz\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.894008 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34532f6a-b213-422d-8126-d74d95c32497-config-data-generated\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.894571 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.894651 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-config-data-default\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.895796 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-operator-scripts\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.903288 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-kolla-config\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc 
kubenswrapper[4869]: I0130 11:10:43.903348 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.907332 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.910116 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsdtz\" (UniqueName: \"kubernetes.io/projected/34532f6a-b213-422d-8126-d74d95c32497-kube-api-access-xsdtz\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.923271 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " pod="openstack/openstack-galera-0" Jan 30 11:10:43 crc kubenswrapper[4869]: I0130 11:10:43.952541 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.106996 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.108677 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.111728 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-545w5" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.111987 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.114338 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.119024 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.123773 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.212836 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.213124 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.213258 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.213359 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.213481 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.213579 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlcdh\" (UniqueName: \"kubernetes.io/projected/4618ceff-14a9-4866-aa22-e29767d8d7e4-kube-api-access-tlcdh\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.213661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.213752 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315114 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315184 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315210 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315243 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315264 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315295 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlcdh\" (UniqueName: \"kubernetes.io/projected/4618ceff-14a9-4866-aa22-e29767d8d7e4-kube-api-access-tlcdh\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315314 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315329 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315427 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.315955 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.316112 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.317004 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.317101 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.330463 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.334480 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.336674 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlcdh\" (UniqueName: \"kubernetes.io/projected/4618ceff-14a9-4866-aa22-e29767d8d7e4-kube-api-access-tlcdh\") pod \"openstack-cell1-galera-0\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.358767 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: 
\"4618ceff-14a9-4866-aa22-e29767d8d7e4\") " pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.361538 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.362472 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.363977 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-65w4m" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.364049 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.364217 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.374888 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.416152 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kolla-config\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.416229 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.416288 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.416325 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-config-data\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.416348 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4645t\" (UniqueName: \"kubernetes.io/projected/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kube-api-access-4645t\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.457615 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.517849 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4645t\" (UniqueName: \"kubernetes.io/projected/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kube-api-access-4645t\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.517897 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kolla-config\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.517954 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.518013 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.518051 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-config-data\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.518873 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-config-data\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.519270 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kolla-config\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.521903 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.526187 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.543924 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4645t\" (UniqueName: \"kubernetes.io/projected/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kube-api-access-4645t\") pod \"memcached-0\" (UID: 
\"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " pod="openstack/memcached-0" Jan 30 11:10:45 crc kubenswrapper[4869]: I0130 11:10:45.722587 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 30 11:10:46 crc kubenswrapper[4869]: I0130 11:10:46.886558 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:10:46 crc kubenswrapper[4869]: I0130 11:10:46.887853 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:10:46 crc kubenswrapper[4869]: I0130 11:10:46.894788 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:10:46 crc kubenswrapper[4869]: I0130 11:10:46.895953 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-z7768" Jan 30 11:10:46 crc kubenswrapper[4869]: I0130 11:10:46.938536 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r5zt\" (UniqueName: \"kubernetes.io/projected/836183b5-4755-4622-a1da-438a1ec0b119-kube-api-access-2r5zt\") pod \"kube-state-metrics-0\" (UID: \"836183b5-4755-4622-a1da-438a1ec0b119\") " pod="openstack/kube-state-metrics-0" Jan 30 11:10:47 crc kubenswrapper[4869]: I0130 11:10:47.039801 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r5zt\" (UniqueName: \"kubernetes.io/projected/836183b5-4755-4622-a1da-438a1ec0b119-kube-api-access-2r5zt\") pod \"kube-state-metrics-0\" (UID: \"836183b5-4755-4622-a1da-438a1ec0b119\") " pod="openstack/kube-state-metrics-0" Jan 30 11:10:47 crc kubenswrapper[4869]: I0130 11:10:47.071521 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r5zt\" (UniqueName: \"kubernetes.io/projected/836183b5-4755-4622-a1da-438a1ec0b119-kube-api-access-2r5zt\") pod \"kube-state-metrics-0\" (UID: \"836183b5-4755-4622-a1da-438a1ec0b119\") " pod="openstack/kube-state-metrics-0" Jan 30 11:10:47 crc kubenswrapper[4869]: I0130 11:10:47.224920 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.539545 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.545059 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.549516 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.549667 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.549732 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.549808 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.550190 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-fjzcn" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.550287 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606443 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606606 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-config\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606642 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqbrb\" (UniqueName: \"kubernetes.io/projected/286d79ce-b123-48b8-b8d1-9a1696fe00bb-kube-api-access-lqbrb\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606676 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606704 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606778 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" 
(UniqueName: \"kubernetes.io/empty-dir/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.606813 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708033 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708087 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708122 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708159 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708219 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708275 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708332 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-config\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708357 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqbrb\" (UniqueName: \"kubernetes.io/projected/286d79ce-b123-48b8-b8d1-9a1696fe00bb-kube-api-access-lqbrb\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 
11:10:50.708446 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.708761 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.713776 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-config\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.714223 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.714301 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.716511 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.724148 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqbrb\" (UniqueName: \"kubernetes.io/projected/286d79ce-b123-48b8-b8d1-9a1696fe00bb-kube-api-access-lqbrb\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.724871 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.731143 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:50 crc kubenswrapper[4869]: I0130 11:10:50.871793 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 30 11:10:51 crc kubenswrapper[4869]: I0130 11:10:51.768746 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:10:51 crc kubenswrapper[4869]: I0130 11:10:51.768787 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.698257 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gm6nb"] Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.699665 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.710030 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.710613 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.710949 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-ndjrs" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.728831 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gm6nb"] Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.747547 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.747672 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-combined-ca-bundle\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.747723 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run-ovn\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.747768 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-log-ovn\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.747838 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/f7eb4552-ad08-470d-b4c5-63c937f11717-scripts\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.747872 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srpwq\" (UniqueName: \"kubernetes.io/projected/f7eb4552-ad08-470d-b4c5-63c937f11717-kube-api-access-srpwq\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.747902 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-ovn-controller-tls-certs\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.750047 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-jfzdq"] Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.753025 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.759103 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jfzdq"] Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850030 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-run\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850118 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-lib\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850269 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-combined-ca-bundle\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850362 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-log-ovn\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7eb4552-ad08-470d-b4c5-63c937f11717-scripts\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850428 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srpwq\" 
(UniqueName: \"kubernetes.io/projected/f7eb4552-ad08-470d-b4c5-63c937f11717-kube-api-access-srpwq\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850494 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-log\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850528 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850645 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run-ovn\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850725 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64p22\" (UniqueName: \"kubernetes.io/projected/e4264086-12ed-4655-9657-14083653d56d-kube-api-access-64p22\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850758 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4264086-12ed-4655-9657-14083653d56d-scripts\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850814 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-etc-ovs\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.850852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-ovn-controller-tls-certs\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.851144 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-log-ovn\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.851465 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run\") pod \"ovn-controller-gm6nb\" (UID: 
\"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.851466 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run-ovn\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.852633 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7eb4552-ad08-470d-b4c5-63c937f11717-scripts\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.859182 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-ovn-controller-tls-certs\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.872769 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srpwq\" (UniqueName: \"kubernetes.io/projected/f7eb4552-ad08-470d-b4c5-63c937f11717-kube-api-access-srpwq\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.872884 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-combined-ca-bundle\") pod \"ovn-controller-gm6nb\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954022 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-lib\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954133 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-log\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954207 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64p22\" (UniqueName: \"kubernetes.io/projected/e4264086-12ed-4655-9657-14083653d56d-kube-api-access-64p22\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954235 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4264086-12ed-4655-9657-14083653d56d-scripts\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954255 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-etc-ovs\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954285 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-run\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954418 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-run\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954427 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-lib\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954468 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-log\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.954483 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-etc-ovs\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.963253 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4264086-12ed-4655-9657-14083653d56d-scripts\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:52 crc kubenswrapper[4869]: I0130 11:10:52.970471 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64p22\" (UniqueName: \"kubernetes.io/projected/e4264086-12ed-4655-9657-14083653d56d-kube-api-access-64p22\") pod \"ovn-controller-ovs-jfzdq\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:53 crc kubenswrapper[4869]: I0130 11:10:53.079415 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gm6nb" Jan 30 11:10:53 crc kubenswrapper[4869]: I0130 11:10:53.093603 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.522134 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.525110 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.527322 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.527521 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.527778 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.528017 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-86pd4" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.534703 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581267 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581320 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581346 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-config\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68kzn\" (UniqueName: \"kubernetes.io/projected/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-kube-api-access-68kzn\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581555 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581589 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581614 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " 
pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.581653 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.683893 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.683957 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.683984 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-config\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.684024 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68kzn\" (UniqueName: \"kubernetes.io/projected/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-kube-api-access-68kzn\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.684184 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.684534 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.684664 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.685332 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-config\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.685410 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.685436 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.685492 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.687404 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.689749 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.692598 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.693520 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.702791 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68kzn\" (UniqueName: \"kubernetes.io/projected/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-kube-api-access-68kzn\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.704144 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:54 crc kubenswrapper[4869]: I0130 11:10:54.850636 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 30 11:10:55 crc kubenswrapper[4869]: I0130 11:10:55.637842 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 30 11:10:56 crc kubenswrapper[4869]: E0130 11:10:56.025184 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Jan 30 11:10:56 crc kubenswrapper[4869]: E0130 11:10:56.025341 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pdgkt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-pfzzn_openstack(beb0033f-9a86-49d8-8508-37742393341e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 11:10:56 crc kubenswrapper[4869]: E0130 11:10:56.026489 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" podUID="beb0033f-9a86-49d8-8508-37742393341e" Jan 30 11:10:56 crc kubenswrapper[4869]: W0130 11:10:56.036209 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfa9c8cb_c5b4_4112_ac55_e51ea9257a7b.slice/crio-0062f25b9c2855632489f4d07e01ff1e11614a600b576496c0f151bc82a1c9ba WatchSource:0}: Error finding container 0062f25b9c2855632489f4d07e01ff1e11614a600b576496c0f151bc82a1c9ba: Status 404 returned error can't find the 
container with id 0062f25b9c2855632489f4d07e01ff1e11614a600b576496c0f151bc82a1c9ba Jan 30 11:10:56 crc kubenswrapper[4869]: E0130 11:10:56.045060 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Jan 30 11:10:56 crc kubenswrapper[4869]: E0130 11:10:56.045229 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hrz8n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-dcv8s_openstack(17f6b0df-4fba-4069-a10e-658776dc40bb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 11:10:56 crc kubenswrapper[4869]: E0130 11:10:56.046435 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" podUID="17f6b0df-4fba-4069-a10e-658776dc40bb" Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.063852 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.297918 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b","Type":"ContainerStarted","Data":"0062f25b9c2855632489f4d07e01ff1e11614a600b576496c0f151bc82a1c9ba"} Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.515809 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.722553 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.856602 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 11:10:56 crc kubenswrapper[4869]: W0130 11:10:56.867987 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15b1a123_3831_4fa6_bc52_3f0cf30953f9.slice/crio-cfb68ec5584822a1531ad52242e67d1b565587f9a72b028a0705a3db2d003cf3 WatchSource:0}: Error finding container cfb68ec5584822a1531ad52242e67d1b565587f9a72b028a0705a3db2d003cf3: Status 404 returned error can't find the container with id cfb68ec5584822a1531ad52242e67d1b565587f9a72b028a0705a3db2d003cf3 Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.869377 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.879574 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 11:10:56 crc kubenswrapper[4869]: W0130 11:10:56.890984 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4618ceff_14a9_4866_aa22_e29767d8d7e4.slice/crio-7e97ede44f9dcded717a0d3ce3c86515c75392d1854bab5bbd0e148cbdfd9b57 WatchSource:0}: Error finding container 7e97ede44f9dcded717a0d3ce3c86515c75392d1854bab5bbd0e148cbdfd9b57: Status 404 returned error can't find the container with id 7e97ede44f9dcded717a0d3ce3c86515c75392d1854bab5bbd0e148cbdfd9b57 Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.892800 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.932948 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrz8n\" (UniqueName: \"kubernetes.io/projected/17f6b0df-4fba-4069-a10e-658776dc40bb-kube-api-access-hrz8n\") pod \"17f6b0df-4fba-4069-a10e-658776dc40bb\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.933009 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb0033f-9a86-49d8-8508-37742393341e-config\") pod \"beb0033f-9a86-49d8-8508-37742393341e\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.933027 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-dns-svc\") pod \"17f6b0df-4fba-4069-a10e-658776dc40bb\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.933068 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdgkt\" (UniqueName: \"kubernetes.io/projected/beb0033f-9a86-49d8-8508-37742393341e-kube-api-access-pdgkt\") pod \"beb0033f-9a86-49d8-8508-37742393341e\" (UID: \"beb0033f-9a86-49d8-8508-37742393341e\") " Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.933211 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-config\") pod \"17f6b0df-4fba-4069-a10e-658776dc40bb\" (UID: \"17f6b0df-4fba-4069-a10e-658776dc40bb\") " Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.934379 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "17f6b0df-4fba-4069-a10e-658776dc40bb" (UID: "17f6b0df-4fba-4069-a10e-658776dc40bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.934476 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/beb0033f-9a86-49d8-8508-37742393341e-config" (OuterVolumeSpecName: "config") pod "beb0033f-9a86-49d8-8508-37742393341e" (UID: "beb0033f-9a86-49d8-8508-37742393341e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.934575 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-config" (OuterVolumeSpecName: "config") pod "17f6b0df-4fba-4069-a10e-658776dc40bb" (UID: "17f6b0df-4fba-4069-a10e-658776dc40bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.939669 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/beb0033f-9a86-49d8-8508-37742393341e-kube-api-access-pdgkt" (OuterVolumeSpecName: "kube-api-access-pdgkt") pod "beb0033f-9a86-49d8-8508-37742393341e" (UID: "beb0033f-9a86-49d8-8508-37742393341e"). InnerVolumeSpecName "kube-api-access-pdgkt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:10:56 crc kubenswrapper[4869]: I0130 11:10:56.939784 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17f6b0df-4fba-4069-a10e-658776dc40bb-kube-api-access-hrz8n" (OuterVolumeSpecName: "kube-api-access-hrz8n") pod "17f6b0df-4fba-4069-a10e-658776dc40bb" (UID: "17f6b0df-4fba-4069-a10e-658776dc40bb"). InnerVolumeSpecName "kube-api-access-hrz8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.013441 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jfzdq"] Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.035421 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.035444 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrz8n\" (UniqueName: \"kubernetes.io/projected/17f6b0df-4fba-4069-a10e-658776dc40bb-kube-api-access-hrz8n\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.035455 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb0033f-9a86-49d8-8508-37742393341e-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.035463 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17f6b0df-4fba-4069-a10e-658776dc40bb-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.035471 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdgkt\" (UniqueName: \"kubernetes.io/projected/beb0033f-9a86-49d8-8508-37742393341e-kube-api-access-pdgkt\") on node \"crc\" DevicePath \"\"" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.057359 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.064427 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gm6nb"] Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.139682 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 30 11:10:57 crc kubenswrapper[4869]: W0130 11:10:57.143161 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaefb9658_d09a_4e8d_9769_3d6133bd4b2c.slice/crio-3bf97457298a17a8e32c7781ad918e1cf1b20c3fbd5c9533c79c400ac324542f WatchSource:0}: Error finding container 3bf97457298a17a8e32c7781ad918e1cf1b20c3fbd5c9533c79c400ac324542f: Status 404 returned error can't find the container with id 3bf97457298a17a8e32c7781ad918e1cf1b20c3fbd5c9533c79c400ac324542f Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.317049 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aefb9658-d09a-4e8d-9769-3d6133bd4b2c","Type":"ContainerStarted","Data":"3bf97457298a17a8e32c7781ad918e1cf1b20c3fbd5c9533c79c400ac324542f"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.319138 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" 
event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerStarted","Data":"c12a8f1c5bed38e4e77664e98d4e2d1081bd60ab2a60dc82963d86b72ee2cbae"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.321280 4869 generic.go:334] "Generic (PLEG): container finished" podID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerID="048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633" exitCode=0 Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.321334 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" event={"ID":"88fa474b-ed48-4ca3-af15-7217e4a9a6df","Type":"ContainerDied","Data":"048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.322852 4869 generic.go:334] "Generic (PLEG): container finished" podID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerID="3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079" exitCode=0 Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.322906 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" event={"ID":"0d5199a1-1aaa-473c-ab15-80b0fc26f92f","Type":"ContainerDied","Data":"3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.324749 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4618ceff-14a9-4866-aa22-e29767d8d7e4","Type":"ContainerStarted","Data":"7e97ede44f9dcded717a0d3ce3c86515c75392d1854bab5bbd0e148cbdfd9b57"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.326371 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d1e4183-a136-428f-9bd8-e857a603da8f","Type":"ContainerStarted","Data":"93214a0ee47d97a9edbed4bb8424a217ec2d4807de9d6b1d0c950cd38d0e3228"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.327733 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"836183b5-4755-4622-a1da-438a1ec0b119","Type":"ContainerStarted","Data":"5bad06863deb49dca58a971e426eae705a40348a0c51708739e6a7e0f391e34d"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.328892 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gm6nb" event={"ID":"f7eb4552-ad08-470d-b4c5-63c937f11717","Type":"ContainerStarted","Data":"0ab9f05a65782af6017be7ca2a9d77511ebf3ff6b198307f140b667e485c331d"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.329844 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15b1a123-3831-4fa6-bc52-3f0cf30953f9","Type":"ContainerStarted","Data":"cfb68ec5584822a1531ad52242e67d1b565587f9a72b028a0705a3db2d003cf3"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.331229 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34532f6a-b213-422d-8126-d74d95c32497","Type":"ContainerStarted","Data":"4651430c60e7f7405e043cbe802bff71dc174764942041ab23b0d2423d4ff3ba"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.332454 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" event={"ID":"17f6b0df-4fba-4069-a10e-658776dc40bb","Type":"ContainerDied","Data":"d4f401f3cb1fd47cb0bb1bf47a64cb1b889e727f9fc2b0b40f42713d4a032d2d"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.332540 4869 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dcv8s" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.334570 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" event={"ID":"beb0033f-9a86-49d8-8508-37742393341e","Type":"ContainerDied","Data":"54e3047176f8ead575a0735c8d75dafb089fe27b6353833c0992d9cd4492a0fc"} Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.334619 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pfzzn" Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.403611 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pfzzn"] Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.413567 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pfzzn"] Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.436834 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dcv8s"] Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.443571 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dcv8s"] Jan 30 11:10:57 crc kubenswrapper[4869]: I0130 11:10:57.869854 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 30 11:10:57 crc kubenswrapper[4869]: W0130 11:10:57.880110 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod286d79ce_b123_48b8_b8d1_9a1696fe00bb.slice/crio-d10c9c52d9b87e2ebc1d2082b80b0003e504e9b90cd526415350d7f02616abed WatchSource:0}: Error finding container d10c9c52d9b87e2ebc1d2082b80b0003e504e9b90cd526415350d7f02616abed: Status 404 returned error can't find the container with id d10c9c52d9b87e2ebc1d2082b80b0003e504e9b90cd526415350d7f02616abed Jan 30 11:10:58 crc kubenswrapper[4869]: I0130 11:10:58.155024 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17f6b0df-4fba-4069-a10e-658776dc40bb" path="/var/lib/kubelet/pods/17f6b0df-4fba-4069-a10e-658776dc40bb/volumes" Jan 30 11:10:58 crc kubenswrapper[4869]: I0130 11:10:58.157783 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="beb0033f-9a86-49d8-8508-37742393341e" path="/var/lib/kubelet/pods/beb0033f-9a86-49d8-8508-37742393341e/volumes" Jan 30 11:10:58 crc kubenswrapper[4869]: I0130 11:10:58.344671 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"286d79ce-b123-48b8-b8d1-9a1696fe00bb","Type":"ContainerStarted","Data":"d10c9c52d9b87e2ebc1d2082b80b0003e504e9b90cd526415350d7f02616abed"} Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.410080 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b","Type":"ContainerStarted","Data":"6e3f92f98f69d9675547c36cfbba2bee734bf6e6196d8d62fa53a9ae6f597e60"} Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.410890 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.422332 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" event={"ID":"88fa474b-ed48-4ca3-af15-7217e4a9a6df","Type":"ContainerStarted","Data":"20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041"} Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 
Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.432590 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" event={"ID":"0d5199a1-1aaa-473c-ab15-80b0fc26f92f","Type":"ContainerStarted","Data":"3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6"}
Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.432815 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv"
Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.433931 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=12.479193133 podStartE2EDuration="21.433910818s" podCreationTimestamp="2026-01-30 11:10:45 +0000 UTC" firstStartedPulling="2026-01-30 11:10:56.063546759 +0000 UTC m=+1006.613422825" lastFinishedPulling="2026-01-30 11:11:05.018264444 +0000 UTC m=+1015.568140510" observedRunningTime="2026-01-30 11:11:06.429654407 +0000 UTC m=+1016.979530473" watchObservedRunningTime="2026-01-30 11:11:06.433910818 +0000 UTC m=+1016.983786904"
Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.456377 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" podStartSLOduration=11.333747294 podStartE2EDuration="25.456358936s" podCreationTimestamp="2026-01-30 11:10:41 +0000 UTC" firstStartedPulling="2026-01-30 11:10:42.109511788 +0000 UTC m=+992.659387854" lastFinishedPulling="2026-01-30 11:10:56.23212343 +0000 UTC m=+1006.781999496" observedRunningTime="2026-01-30 11:11:06.454161874 +0000 UTC m=+1017.004037960" watchObservedRunningTime="2026-01-30 11:11:06.456358936 +0000 UTC m=+1017.006235002"
Jan 30 11:11:06 crc kubenswrapper[4869]: I0130 11:11:06.493659 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" podStartSLOduration=12.278672898 podStartE2EDuration="26.493632495s" podCreationTimestamp="2026-01-30 11:10:40 +0000 UTC" firstStartedPulling="2026-01-30 11:10:42.009448004 +0000 UTC m=+992.559324070" lastFinishedPulling="2026-01-30 11:10:56.224407601 +0000 UTC m=+1006.774283667" observedRunningTime="2026-01-30 11:11:06.487697187 +0000 UTC m=+1017.037573253" watchObservedRunningTime="2026-01-30 11:11:06.493632495 +0000 UTC m=+1017.043508561"
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.448507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerStarted","Data":"bc346a21b4ea582edb1fae9adeb54b86c6065b26a7c7ba55773b410b593e821d"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.451244 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4618ceff-14a9-4866-aa22-e29767d8d7e4","Type":"ContainerStarted","Data":"022cd75b7d7edfb330306e68dd74e8e4b7a53321b13225e5a124ef1093f6c767"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.454297 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34532f6a-b213-422d-8126-d74d95c32497","Type":"ContainerStarted","Data":"fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.456480 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d1e4183-a136-428f-9bd8-e857a603da8f","Type":"ContainerStarted","Data":"55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.460044 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aefb9658-d09a-4e8d-9769-3d6133bd4b2c","Type":"ContainerStarted","Data":"f645ebc573f7dd5869dcdf3ccfab2bce9e8305d65b43a7373e7b2cef92aec27f"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.461130 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"836183b5-4755-4622-a1da-438a1ec0b119","Type":"ContainerStarted","Data":"c93c7651e489d8173d3b52e5b5ca98b94c7f94435736b599f71054e104d9bb0f"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.461891 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.463028 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gm6nb" event={"ID":"f7eb4552-ad08-470d-b4c5-63c937f11717","Type":"ContainerStarted","Data":"22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.463424 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-gm6nb"
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.464540 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"286d79ce-b123-48b8-b8d1-9a1696fe00bb","Type":"ContainerStarted","Data":"ceb1a3a6706024f356a33ef4db537324ff7ad4fc04da03e91359bb65670ed582"}
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.523426 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-gm6nb" podStartSLOduration=7.416580387 podStartE2EDuration="15.523407203s" podCreationTimestamp="2026-01-30 11:10:52 +0000 UTC" firstStartedPulling="2026-01-30 11:10:57.072239158 +0000 UTC m=+1007.622115224" lastFinishedPulling="2026-01-30 11:11:05.179065974 +0000 UTC m=+1015.728942040" observedRunningTime="2026-01-30 11:11:07.514241772 +0000 UTC m=+1018.064117828" watchObservedRunningTime="2026-01-30 11:11:07.523407203 +0000 UTC m=+1018.073283269"
Jan 30 11:11:07 crc kubenswrapper[4869]: I0130 11:11:07.560633 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=12.650904516 podStartE2EDuration="21.56060661s" podCreationTimestamp="2026-01-30 11:10:46 +0000 UTC" firstStartedPulling="2026-01-30 11:10:57.063695685 +0000 UTC m=+1007.613571751" lastFinishedPulling="2026-01-30 11:11:05.973397779 +0000 UTC m=+1016.523273845" observedRunningTime="2026-01-30 11:11:07.555207757 +0000 UTC m=+1018.105083823" watchObservedRunningTime="2026-01-30 11:11:07.56060661 +0000 UTC m=+1018.110482666"
Jan 30 11:11:08 crc kubenswrapper[4869]: I0130 11:11:08.473053 4869 generic.go:334] "Generic (PLEG): container finished" podID="e4264086-12ed-4655-9657-14083653d56d" containerID="bc346a21b4ea582edb1fae9adeb54b86c6065b26a7c7ba55773b410b593e821d" exitCode=0
Jan 30 11:11:08 crc kubenswrapper[4869]: I0130 11:11:08.473111 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerDied","Data":"bc346a21b4ea582edb1fae9adeb54b86c6065b26a7c7ba55773b410b593e821d"}
Jan 30 11:11:08 crc kubenswrapper[4869]: I0130 11:11:08.475776 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15b1a123-3831-4fa6-bc52-3f0cf30953f9","Type":"ContainerStarted","Data":"ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129"}
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.483754 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerStarted","Data":"9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021"}
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.484061 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerStarted","Data":"805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c"}
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.484078 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jfzdq"
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.484088 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jfzdq"
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.486429 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aefb9658-d09a-4e8d-9769-3d6133bd4b2c","Type":"ContainerStarted","Data":"9f060107943b0642dfd7e507c493ff833b9b292bb9f38467328dd22ddf77c864"}
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.488441 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"286d79ce-b123-48b8-b8d1-9a1696fe00bb","Type":"ContainerStarted","Data":"9a8f8895d0bd2c0f894fad76153cef03bee6e3dab153bccb600a99368ebe01e6"}
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.508696 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-jfzdq" podStartSLOduration=9.369018508 podStartE2EDuration="17.508676657s" podCreationTimestamp="2026-01-30 11:10:52 +0000 UTC" firstStartedPulling="2026-01-30 11:10:57.039748944 +0000 UTC m=+1007.589625010" lastFinishedPulling="2026-01-30 11:11:05.179407093 +0000 UTC m=+1015.729283159" observedRunningTime="2026-01-30 11:11:09.506008941 +0000 UTC m=+1020.055885007" watchObservedRunningTime="2026-01-30 11:11:09.508676657 +0000 UTC m=+1020.058552723"
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.528342 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.905689596 podStartE2EDuration="20.528310654s" podCreationTimestamp="2026-01-30 11:10:49 +0000 UTC" firstStartedPulling="2026-01-30 11:10:57.884601406 +0000 UTC m=+1008.434477472" lastFinishedPulling="2026-01-30 11:11:08.507222464 +0000 UTC m=+1019.057098530" observedRunningTime="2026-01-30 11:11:09.525855695 +0000 UTC m=+1020.075731761" watchObservedRunningTime="2026-01-30 11:11:09.528310654 +0000 UTC m=+1020.078186720"
Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.557736 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.165213291 podStartE2EDuration="16.55769569s" podCreationTimestamp="2026-01-30 11:10:53 +0000 UTC" firstStartedPulling="2026-01-30 11:10:57.145360686 +0000 UTC m=+1007.695236752" lastFinishedPulling="2026-01-30 11:11:08.537843085 +0000 UTC
m=+1019.087719151" observedRunningTime="2026-01-30 11:11:09.552522773 +0000 UTC m=+1020.102398839" watchObservedRunningTime="2026-01-30 11:11:09.55769569 +0000 UTC m=+1020.107571756" Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.851174 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 30 11:11:09 crc kubenswrapper[4869]: I0130 11:11:09.851254 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 30 11:11:10 crc kubenswrapper[4869]: I0130 11:11:10.873094 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.283855 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.503266 4869 generic.go:334] "Generic (PLEG): container finished" podID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerID="022cd75b7d7edfb330306e68dd74e8e4b7a53321b13225e5a124ef1093f6c767" exitCode=0 Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.503544 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4618ceff-14a9-4866-aa22-e29767d8d7e4","Type":"ContainerDied","Data":"022cd75b7d7edfb330306e68dd74e8e4b7a53321b13225e5a124ef1093f6c767"} Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.511084 4869 generic.go:334] "Generic (PLEG): container finished" podID="34532f6a-b213-422d-8126-d74d95c32497" containerID="fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6" exitCode=0 Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.512308 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34532f6a-b213-422d-8126-d74d95c32497","Type":"ContainerDied","Data":"fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6"} Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.551025 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.627931 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-dn9dv"] Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.628242 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" podUID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerName="dnsmasq-dns" containerID="cri-o://3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6" gracePeriod=10 Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.873013 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 30 11:11:11 crc kubenswrapper[4869]: I0130 11:11:11.932581 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.021195 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.108632 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-258rh\" (UniqueName: \"kubernetes.io/projected/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-kube-api-access-258rh\") pod \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.108801 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-config\") pod \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.108863 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-dns-svc\") pod \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\" (UID: \"0d5199a1-1aaa-473c-ab15-80b0fc26f92f\") " Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.114424 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-kube-api-access-258rh" (OuterVolumeSpecName: "kube-api-access-258rh") pod "0d5199a1-1aaa-473c-ab15-80b0fc26f92f" (UID: "0d5199a1-1aaa-473c-ab15-80b0fc26f92f"). InnerVolumeSpecName "kube-api-access-258rh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.150449 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0d5199a1-1aaa-473c-ab15-80b0fc26f92f" (UID: "0d5199a1-1aaa-473c-ab15-80b0fc26f92f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.153943 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-config" (OuterVolumeSpecName: "config") pod "0d5199a1-1aaa-473c-ab15-80b0fc26f92f" (UID: "0d5199a1-1aaa-473c-ab15-80b0fc26f92f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.210754 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.210795 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-258rh\" (UniqueName: \"kubernetes.io/projected/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-kube-api-access-258rh\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.210810 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5199a1-1aaa-473c-ab15-80b0fc26f92f-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.528696 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4618ceff-14a9-4866-aa22-e29767d8d7e4","Type":"ContainerStarted","Data":"ab6fefef94e67c7669176c91b03cf31715872e1eba1e24a159531ceb1c264993"} Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.530804 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34532f6a-b213-422d-8126-d74d95c32497","Type":"ContainerStarted","Data":"b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03"} Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.534172 4869 generic.go:334] "Generic (PLEG): container finished" podID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerID="3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6" exitCode=0 Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.534233 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" event={"ID":"0d5199a1-1aaa-473c-ab15-80b0fc26f92f","Type":"ContainerDied","Data":"3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6"} Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.534342 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" event={"ID":"0d5199a1-1aaa-473c-ab15-80b0fc26f92f","Type":"ContainerDied","Data":"fd938a97381568b714913f416557e9c981d40415c096fa746cc07cc047d75276"} Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.534368 4869 scope.go:117] "RemoveContainer" containerID="3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.534252 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-dn9dv" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.552739 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=19.931474976 podStartE2EDuration="28.552695812s" podCreationTimestamp="2026-01-30 11:10:44 +0000 UTC" firstStartedPulling="2026-01-30 11:10:56.895278768 +0000 UTC m=+1007.445154834" lastFinishedPulling="2026-01-30 11:11:05.516499604 +0000 UTC m=+1016.066375670" observedRunningTime="2026-01-30 11:11:12.54841464 +0000 UTC m=+1023.098290726" watchObservedRunningTime="2026-01-30 11:11:12.552695812 +0000 UTC m=+1023.102571888" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.577303 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=22.157108148 podStartE2EDuration="30.57728238s" podCreationTimestamp="2026-01-30 11:10:42 +0000 UTC" firstStartedPulling="2026-01-30 11:10:56.759219401 +0000 UTC m=+1007.309095467" lastFinishedPulling="2026-01-30 11:11:05.179393633 +0000 UTC m=+1015.729269699" observedRunningTime="2026-01-30 11:11:12.571077244 +0000 UTC m=+1023.120953320" watchObservedRunningTime="2026-01-30 11:11:12.57728238 +0000 UTC m=+1023.127158446" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.584010 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.588952 4869 scope.go:117] "RemoveContainer" containerID="3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.593441 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-dn9dv"] Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.610983 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-dn9dv"] Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.627981 4869 scope.go:117] "RemoveContainer" containerID="3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6" Jan 30 11:11:12 crc kubenswrapper[4869]: E0130 11:11:12.628514 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6\": container with ID starting with 3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6 not found: ID does not exist" containerID="3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.628564 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6"} err="failed to get container status \"3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6\": rpc error: code = NotFound desc = could not find container \"3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6\": container with ID starting with 3889e8befd7abeedb9a9dabcdffe42678ea25190a26ac7fc84a21b2b095e55e6 not found: ID does not exist" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.628591 4869 scope.go:117] "RemoveContainer" containerID="3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079" Jan 30 11:11:12 crc kubenswrapper[4869]: E0130 11:11:12.629291 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079\": container with ID starting with 3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079 not found: ID does not exist" containerID="3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.629330 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079"} err="failed to get container status \"3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079\": rpc error: code = NotFound desc = could not find container \"3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079\": container with ID starting with 3211d34ff7fc076e39c080f15fe277422805beb68829af0a0d72887293ee0079 not found: ID does not exist" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.826076 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-c92k9"] Jan 30 11:11:12 crc kubenswrapper[4869]: E0130 11:11:12.826425 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerName="init" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.826464 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerName="init" Jan 30 11:11:12 crc kubenswrapper[4869]: E0130 11:11:12.826505 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerName="dnsmasq-dns" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.826512 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerName="dnsmasq-dns" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.826662 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" containerName="dnsmasq-dns" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.827441 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.829492 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.857070 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-c92k9"] Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.919072 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.922655 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7td2\" (UniqueName: \"kubernetes.io/projected/390ee913-84e6-487b-b7fe-b09471268a47-kube-api-access-x7td2\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.922817 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.922890 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.922978 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-config\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.932039 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-jrjbc"] Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.936489 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.938592 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.948318 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-jrjbc"] Jan 30 11:11:12 crc kubenswrapper[4869]: I0130 11:11:12.969153 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025350 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025455 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-config\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025486 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-combined-ca-bundle\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025510 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lcvl\" (UniqueName: \"kubernetes.io/projected/333bf862-5ea9-43df-926f-5d8e463b2c80-kube-api-access-8lcvl\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/333bf862-5ea9-43df-926f-5d8e463b2c80-config\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025653 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7td2\" (UniqueName: \"kubernetes.io/projected/390ee913-84e6-487b-b7fe-b09471268a47-kube-api-access-x7td2\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025674 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovn-rundir\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025777 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovs-rundir\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025818 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.025856 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.026640 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-config\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.026642 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.027498 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.051412 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7td2\" (UniqueName: \"kubernetes.io/projected/390ee913-84e6-487b-b7fe-b09471268a47-kube-api-access-x7td2\") pod \"dnsmasq-dns-5bf47b49b7-c92k9\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.126995 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/333bf862-5ea9-43df-926f-5d8e463b2c80-config\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.127052 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovn-rundir\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.127129 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovs-rundir\") pod 
\"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.127202 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.127251 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-combined-ca-bundle\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.127274 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lcvl\" (UniqueName: \"kubernetes.io/projected/333bf862-5ea9-43df-926f-5d8e463b2c80-kube-api-access-8lcvl\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.128768 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/333bf862-5ea9-43df-926f-5d8e463b2c80-config\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.129506 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovs-rundir\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.129510 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovn-rundir\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.132361 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-combined-ca-bundle\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.132696 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-jrjbc\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.143206 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lcvl\" (UniqueName: \"kubernetes.io/projected/333bf862-5ea9-43df-926f-5d8e463b2c80-kube-api-access-8lcvl\") pod \"ovn-controller-metrics-jrjbc\" (UID: 
\"333bf862-5ea9-43df-926f-5d8e463b2c80\") " pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.158853 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.256386 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.313312 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.315259 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.317032 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.317978 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.318155 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.318403 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-9bjpm" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.339894 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.340753 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f59r\" (UniqueName: \"kubernetes.io/projected/493ac356-9bec-4285-850c-8e3c7739641e-kube-api-access-7f59r\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.340830 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/493ac356-9bec-4285-850c-8e3c7739641e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.340862 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.340891 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-config\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.340916 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.340938 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-scripts\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.340962 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.342681 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-c92k9"] Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.396271 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-9z5zb"] Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.397987 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.401083 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.412849 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-9z5zb"] Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.441971 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/493ac356-9bec-4285-850c-8e3c7739641e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442027 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442054 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7s5mr\" (UniqueName: \"kubernetes.io/projected/2eda1acf-a7b5-4353-a277-9ae907164424-kube-api-access-7s5mr\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442080 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-config\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442110 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442133 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-dns-svc\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442148 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442167 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-config\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-scripts\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442206 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442234 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f59r\" (UniqueName: \"kubernetes.io/projected/493ac356-9bec-4285-850c-8e3c7739641e-kube-api-access-7f59r\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442268 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.442425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/493ac356-9bec-4285-850c-8e3c7739641e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.443061 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-config\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.443393 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-scripts\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 
11:11:13.446286 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.447082 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.448489 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.463531 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f59r\" (UniqueName: \"kubernetes.io/projected/493ac356-9bec-4285-850c-8e3c7739641e-kube-api-access-7f59r\") pod \"ovn-northd-0\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.544025 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-dns-svc\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.544087 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.544247 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-config\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.544334 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.544424 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7s5mr\" (UniqueName: \"kubernetes.io/projected/2eda1acf-a7b5-4353-a277-9ae907164424-kube-api-access-7s5mr\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.545498 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.545523 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-dns-svc\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.545657 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-config\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.546085 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.569871 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7s5mr\" (UniqueName: \"kubernetes.io/projected/2eda1acf-a7b5-4353-a277-9ae907164424-kube-api-access-7s5mr\") pod \"dnsmasq-dns-8554648995-9z5zb\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.662035 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.698189 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-c92k9"] Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.717460 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.809308 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-jrjbc"] Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.953856 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 30 11:11:13 crc kubenswrapper[4869]: I0130 11:11:13.953967 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.145843 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d5199a1-1aaa-473c-ab15-80b0fc26f92f" path="/var/lib/kubelet/pods/0d5199a1-1aaa-473c-ab15-80b0fc26f92f/volumes" Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.146979 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 30 11:11:14 crc kubenswrapper[4869]: W0130 11:11:14.148568 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod493ac356_9bec_4285_850c_8e3c7739641e.slice/crio-e1a91e455ba523d7c830ca0009e54d665729b34b1a3bbc738c851fd1b7d766d6 WatchSource:0}: Error finding container e1a91e455ba523d7c830ca0009e54d665729b34b1a3bbc738c851fd1b7d766d6: Status 404 returned error can't find the container with id e1a91e455ba523d7c830ca0009e54d665729b34b1a3bbc738c851fd1b7d766d6 Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.227666 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-9z5zb"] Jan 30 11:11:14 crc kubenswrapper[4869]: W0130 11:11:14.229771 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eda1acf_a7b5_4353_a277_9ae907164424.slice/crio-0874d5a1018612e89f1c9ded9a4952be2efe4628037a21c4279f3cb8492d3658 WatchSource:0}: Error finding container 0874d5a1018612e89f1c9ded9a4952be2efe4628037a21c4279f3cb8492d3658: Status 404 returned error can't find the container with id 0874d5a1018612e89f1c9ded9a4952be2efe4628037a21c4279f3cb8492d3658 Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.554475 4869 generic.go:334] "Generic (PLEG): container finished" podID="390ee913-84e6-487b-b7fe-b09471268a47" containerID="9431790515f32854ac6fbd60f84e70b6928ac5d44a84a249c1026d7e353f2e51" exitCode=0 Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.554827 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" event={"ID":"390ee913-84e6-487b-b7fe-b09471268a47","Type":"ContainerDied","Data":"9431790515f32854ac6fbd60f84e70b6928ac5d44a84a249c1026d7e353f2e51"} Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.554886 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" event={"ID":"390ee913-84e6-487b-b7fe-b09471268a47","Type":"ContainerStarted","Data":"9a6664f466d853534e3e93d6f24d3aa5345917375fb6f138f606667e5bfdc6cd"} Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.556168 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"493ac356-9bec-4285-850c-8e3c7739641e","Type":"ContainerStarted","Data":"e1a91e455ba523d7c830ca0009e54d665729b34b1a3bbc738c851fd1b7d766d6"} Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.557661 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="2eda1acf-a7b5-4353-a277-9ae907164424" containerID="0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411" exitCode=0 Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.557759 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-9z5zb" event={"ID":"2eda1acf-a7b5-4353-a277-9ae907164424","Type":"ContainerDied","Data":"0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411"} Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.558231 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-9z5zb" event={"ID":"2eda1acf-a7b5-4353-a277-9ae907164424","Type":"ContainerStarted","Data":"0874d5a1018612e89f1c9ded9a4952be2efe4628037a21c4279f3cb8492d3658"} Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.563538 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-jrjbc" event={"ID":"333bf862-5ea9-43df-926f-5d8e463b2c80","Type":"ContainerStarted","Data":"8dd5852d63fc179fcf40f8c40a3a27e1e4ca18f73ee8966299e8329c0ac8d776"} Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.563596 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-jrjbc" event={"ID":"333bf862-5ea9-43df-926f-5d8e463b2c80","Type":"ContainerStarted","Data":"6fd34c93cb0396b2d042d97c363690853e0249d5664e9373de8228ced96fe825"} Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.658688 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-jrjbc" podStartSLOduration=2.6586654960000002 podStartE2EDuration="2.658665496s" podCreationTimestamp="2026-01-30 11:11:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:11:14.623481916 +0000 UTC m=+1025.173358002" watchObservedRunningTime="2026-01-30 11:11:14.658665496 +0000 UTC m=+1025.208541562" Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.864980 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.987440 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-ovsdbserver-nb\") pod \"390ee913-84e6-487b-b7fe-b09471268a47\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.987577 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-dns-svc\") pod \"390ee913-84e6-487b-b7fe-b09471268a47\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.987618 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-config\") pod \"390ee913-84e6-487b-b7fe-b09471268a47\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.987834 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7td2\" (UniqueName: \"kubernetes.io/projected/390ee913-84e6-487b-b7fe-b09471268a47-kube-api-access-x7td2\") pod \"390ee913-84e6-487b-b7fe-b09471268a47\" (UID: \"390ee913-84e6-487b-b7fe-b09471268a47\") " Jan 30 11:11:14 crc kubenswrapper[4869]: I0130 11:11:14.991690 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/390ee913-84e6-487b-b7fe-b09471268a47-kube-api-access-x7td2" (OuterVolumeSpecName: "kube-api-access-x7td2") pod "390ee913-84e6-487b-b7fe-b09471268a47" (UID: "390ee913-84e6-487b-b7fe-b09471268a47"). InnerVolumeSpecName "kube-api-access-x7td2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.013755 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "390ee913-84e6-487b-b7fe-b09471268a47" (UID: "390ee913-84e6-487b-b7fe-b09471268a47"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.014859 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "390ee913-84e6-487b-b7fe-b09471268a47" (UID: "390ee913-84e6-487b-b7fe-b09471268a47"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.017156 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-config" (OuterVolumeSpecName: "config") pod "390ee913-84e6-487b-b7fe-b09471268a47" (UID: "390ee913-84e6-487b-b7fe-b09471268a47"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.090209 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.090253 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.090266 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390ee913-84e6-487b-b7fe-b09471268a47-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.090282 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7td2\" (UniqueName: \"kubernetes.io/projected/390ee913-84e6-487b-b7fe-b09471268a47-kube-api-access-x7td2\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.457834 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.457889 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.573193 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"493ac356-9bec-4285-850c-8e3c7739641e","Type":"ContainerStarted","Data":"86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4"} Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.573245 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"493ac356-9bec-4285-850c-8e3c7739641e","Type":"ContainerStarted","Data":"f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5"} Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.573356 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.575997 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-9z5zb" event={"ID":"2eda1acf-a7b5-4353-a277-9ae907164424","Type":"ContainerStarted","Data":"b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb"} Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.576155 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.577447 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" event={"ID":"390ee913-84e6-487b-b7fe-b09471268a47","Type":"ContainerDied","Data":"9a6664f466d853534e3e93d6f24d3aa5345917375fb6f138f606667e5bfdc6cd"} Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.577466 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-c92k9" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.577485 4869 scope.go:117] "RemoveContainer" containerID="9431790515f32854ac6fbd60f84e70b6928ac5d44a84a249c1026d7e353f2e51" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.598571 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.518003958 podStartE2EDuration="2.598545168s" podCreationTimestamp="2026-01-30 11:11:13 +0000 UTC" firstStartedPulling="2026-01-30 11:11:14.150885774 +0000 UTC m=+1024.700761840" lastFinishedPulling="2026-01-30 11:11:15.231426994 +0000 UTC m=+1025.781303050" observedRunningTime="2026-01-30 11:11:15.596572992 +0000 UTC m=+1026.146449058" watchObservedRunningTime="2026-01-30 11:11:15.598545168 +0000 UTC m=+1026.148421234" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.624734 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-9z5zb" podStartSLOduration=2.624673891 podStartE2EDuration="2.624673891s" podCreationTimestamp="2026-01-30 11:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:11:15.619171775 +0000 UTC m=+1026.169047851" watchObservedRunningTime="2026-01-30 11:11:15.624673891 +0000 UTC m=+1026.174549967" Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.660079 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-c92k9"] Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.666845 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-c92k9"] Jan 30 11:11:15 crc kubenswrapper[4869]: I0130 11:11:15.724599 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 30 11:11:16 crc kubenswrapper[4869]: I0130 11:11:16.142319 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="390ee913-84e6-487b-b7fe-b09471268a47" path="/var/lib/kubelet/pods/390ee913-84e6-487b-b7fe-b09471268a47/volumes" Jan 30 11:11:16 crc kubenswrapper[4869]: I0130 11:11:16.500964 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 30 11:11:16 crc kubenswrapper[4869]: I0130 11:11:16.573056 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.212582 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-9z5zb"] Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.239932 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.346517 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-gddbs"] Jan 30 11:11:17 crc kubenswrapper[4869]: E0130 11:11:17.346972 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="390ee913-84e6-487b-b7fe-b09471268a47" containerName="init" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.346996 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="390ee913-84e6-487b-b7fe-b09471268a47" containerName="init" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.347216 4869 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="390ee913-84e6-487b-b7fe-b09471268a47" containerName="init" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.348268 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.378232 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-gddbs"] Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.439633 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-542cp\" (UniqueName: \"kubernetes.io/projected/58fab5d7-620a-47aa-9df0-35e587e79318-kube-api-access-542cp\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.439736 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-config\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.439779 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.439810 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.439852 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.541783 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.541858 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.541937 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-542cp\" (UniqueName: \"kubernetes.io/projected/58fab5d7-620a-47aa-9df0-35e587e79318-kube-api-access-542cp\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " 
pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.541959 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-config\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.542014 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.542844 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.543459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.544024 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.544794 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-config\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.571167 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-542cp\" (UniqueName: \"kubernetes.io/projected/58fab5d7-620a-47aa-9df0-35e587e79318-kube-api-access-542cp\") pod \"dnsmasq-dns-b8fbc5445-gddbs\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.591793 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-9z5zb" podUID="2eda1acf-a7b5-4353-a277-9ae907164424" containerName="dnsmasq-dns" containerID="cri-o://b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb" gracePeriod=10 Jan 30 11:11:17 crc kubenswrapper[4869]: I0130 11:11:17.666795 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.084514 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.213778 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-gddbs"] Jan 30 11:11:18 crc kubenswrapper[4869]: W0130 11:11:18.217724 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58fab5d7_620a_47aa_9df0_35e587e79318.slice/crio-53d92479d0c328fb0b34da50da76a6c3b7f8459fa1c071e30580e93f0e9e2667 WatchSource:0}: Error finding container 53d92479d0c328fb0b34da50da76a6c3b7f8459fa1c071e30580e93f0e9e2667: Status 404 returned error can't find the container with id 53d92479d0c328fb0b34da50da76a6c3b7f8459fa1c071e30580e93f0e9e2667 Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.259424 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-sb\") pod \"2eda1acf-a7b5-4353-a277-9ae907164424\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.259895 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-config\") pod \"2eda1acf-a7b5-4353-a277-9ae907164424\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.260199 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-dns-svc\") pod \"2eda1acf-a7b5-4353-a277-9ae907164424\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.260341 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7s5mr\" (UniqueName: \"kubernetes.io/projected/2eda1acf-a7b5-4353-a277-9ae907164424-kube-api-access-7s5mr\") pod \"2eda1acf-a7b5-4353-a277-9ae907164424\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.260504 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-nb\") pod \"2eda1acf-a7b5-4353-a277-9ae907164424\" (UID: \"2eda1acf-a7b5-4353-a277-9ae907164424\") " Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.263538 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eda1acf-a7b5-4353-a277-9ae907164424-kube-api-access-7s5mr" (OuterVolumeSpecName: "kube-api-access-7s5mr") pod "2eda1acf-a7b5-4353-a277-9ae907164424" (UID: "2eda1acf-a7b5-4353-a277-9ae907164424"). InnerVolumeSpecName "kube-api-access-7s5mr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.299460 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2eda1acf-a7b5-4353-a277-9ae907164424" (UID: "2eda1acf-a7b5-4353-a277-9ae907164424"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.300444 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2eda1acf-a7b5-4353-a277-9ae907164424" (UID: "2eda1acf-a7b5-4353-a277-9ae907164424"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.304650 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-config" (OuterVolumeSpecName: "config") pod "2eda1acf-a7b5-4353-a277-9ae907164424" (UID: "2eda1acf-a7b5-4353-a277-9ae907164424"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.306833 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2eda1acf-a7b5-4353-a277-9ae907164424" (UID: "2eda1acf-a7b5-4353-a277-9ae907164424"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.363225 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.363265 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7s5mr\" (UniqueName: \"kubernetes.io/projected/2eda1acf-a7b5-4353-a277-9ae907164424-kube-api-access-7s5mr\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.363278 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.363287 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.363295 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eda1acf-a7b5-4353-a277-9ae907164424-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.522279 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.522766 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eda1acf-a7b5-4353-a277-9ae907164424" containerName="init" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.522791 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eda1acf-a7b5-4353-a277-9ae907164424" containerName="init" Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.522831 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eda1acf-a7b5-4353-a277-9ae907164424" containerName="dnsmasq-dns" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.522839 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eda1acf-a7b5-4353-a277-9ae907164424" 
containerName="dnsmasq-dns" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.523039 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eda1acf-a7b5-4353-a277-9ae907164424" containerName="dnsmasq-dns" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.530682 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.534425 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.534496 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-rkrzz" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.534512 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.534426 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.544903 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.599494 4869 generic.go:334] "Generic (PLEG): container finished" podID="2eda1acf-a7b5-4353-a277-9ae907164424" containerID="b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb" exitCode=0 Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.599550 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-9z5zb" event={"ID":"2eda1acf-a7b5-4353-a277-9ae907164424","Type":"ContainerDied","Data":"b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb"} Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.599580 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-9z5zb" event={"ID":"2eda1acf-a7b5-4353-a277-9ae907164424","Type":"ContainerDied","Data":"0874d5a1018612e89f1c9ded9a4952be2efe4628037a21c4279f3cb8492d3658"} Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.599599 4869 scope.go:117] "RemoveContainer" containerID="b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.599723 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-9z5zb" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.610173 4869 generic.go:334] "Generic (PLEG): container finished" podID="58fab5d7-620a-47aa-9df0-35e587e79318" containerID="732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8" exitCode=0 Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.610214 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" event={"ID":"58fab5d7-620a-47aa-9df0-35e587e79318","Type":"ContainerDied","Data":"732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8"} Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.610281 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" event={"ID":"58fab5d7-620a-47aa-9df0-35e587e79318","Type":"ContainerStarted","Data":"53d92479d0c328fb0b34da50da76a6c3b7f8459fa1c071e30580e93f0e9e2667"} Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.629118 4869 scope.go:117] "RemoveContainer" containerID="0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.655546 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-9z5zb"] Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.661649 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-9z5zb"] Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.668836 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.668935 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.668993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-cache\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.669057 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-lock\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.669076 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1a0e46-1fb4-4ab1-9417-cba939546529-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.669102 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zv5l\" (UniqueName: 
\"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-kube-api-access-8zv5l\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.709414 4869 scope.go:117] "RemoveContainer" containerID="b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb" Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.709948 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb\": container with ID starting with b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb not found: ID does not exist" containerID="b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.709981 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb"} err="failed to get container status \"b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb\": rpc error: code = NotFound desc = could not find container \"b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb\": container with ID starting with b871b1984590ea3ae25e8c5872897907190d247db97c452ed5604c3b4eb3addb not found: ID does not exist" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.710004 4869 scope.go:117] "RemoveContainer" containerID="0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411" Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.710528 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411\": container with ID starting with 0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411 not found: ID does not exist" containerID="0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.710670 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411"} err="failed to get container status \"0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411\": rpc error: code = NotFound desc = could not find container \"0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411\": container with ID starting with 0122c7f4651c246f2d412cb9fdd20b5cdcc4a92019d56dc8a1765feeb6840411 not found: ID does not exist" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.770118 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-lock\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.770399 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1a0e46-1fb4-4ab1-9417-cba939546529-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.770442 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zv5l\" 
(UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-kube-api-access-8zv5l\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.770462 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.770496 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.770553 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-cache\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.771095 4869 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.771131 4869 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.771192 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift podName:7b1a0e46-1fb4-4ab1-9417-cba939546529 nodeName:}" failed. No retries permitted until 2026-01-30 11:11:19.271171078 +0000 UTC m=+1029.821047214 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift") pod "swift-storage-0" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529") : configmap "swift-ring-files" not found Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.771244 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.771272 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-cache\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.771804 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-lock\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.776407 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1a0e46-1fb4-4ab1-9417-cba939546529-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.789196 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zv5l\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-kube-api-access-8zv5l\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.797517 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.878910 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-57wlq"] Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.880099 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.883801 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.883803 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.886226 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.893942 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-57wlq"] Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.921994 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-57wlq"] Jan 30 11:11:18 crc kubenswrapper[4869]: E0130 11:11:18.922675 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-x6m8p ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-57wlq" podUID="45307b7b-ca4c-4f46-9a4b-256df49bb080" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.952758 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-kx76b"] Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.954037 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.972629 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-swiftconf\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.972678 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6m8p\" (UniqueName: \"kubernetes.io/projected/45307b7b-ca4c-4f46-9a4b-256df49bb080-kube-api-access-x6m8p\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.972701 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-dispersionconf\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.972872 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-ring-data-devices\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.972999 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-combined-ca-bundle\") pod 
\"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.973121 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45307b7b-ca4c-4f46-9a4b-256df49bb080-etc-swift\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.973224 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-scripts\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:18 crc kubenswrapper[4869]: I0130 11:11:18.976126 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-kx76b"] Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.075016 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6m8p\" (UniqueName: \"kubernetes.io/projected/45307b7b-ca4c-4f46-9a4b-256df49bb080-kube-api-access-x6m8p\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.075368 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-dispersionconf\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.075522 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-dispersionconf\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.075628 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-scripts\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.075771 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln6dx\" (UniqueName: \"kubernetes.io/projected/db3f3820-1e34-4ad4-bb89-b587355526a6-kube-api-access-ln6dx\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.075975 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-swiftconf\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076053 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-ring-data-devices\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076144 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-combined-ca-bundle\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076179 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-ring-data-devices\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076236 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45307b7b-ca4c-4f46-9a4b-256df49bb080-etc-swift\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076332 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-combined-ca-bundle\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076578 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-scripts\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076672 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-swiftconf\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.076769 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db3f3820-1e34-4ad4-bb89-b587355526a6-etc-swift\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.077832 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45307b7b-ca4c-4f46-9a4b-256df49bb080-etc-swift\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.078111 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" 
(UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-ring-data-devices\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.078722 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-scripts\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.080429 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-dispersionconf\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.080926 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-swiftconf\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.081919 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-combined-ca-bundle\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.095262 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6m8p\" (UniqueName: \"kubernetes.io/projected/45307b7b-ca4c-4f46-9a4b-256df49bb080-kube-api-access-x6m8p\") pod \"swift-ring-rebalance-57wlq\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.178324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-dispersionconf\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.178373 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-scripts\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.178449 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln6dx\" (UniqueName: \"kubernetes.io/projected/db3f3820-1e34-4ad4-bb89-b587355526a6-kube-api-access-ln6dx\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.178817 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-swiftconf\") pod \"swift-ring-rebalance-kx76b\" (UID: 
\"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.179151 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-scripts\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.179420 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-ring-data-devices\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.179469 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-combined-ca-bundle\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.179533 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db3f3820-1e34-4ad4-bb89-b587355526a6-etc-swift\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.179971 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db3f3820-1e34-4ad4-bb89-b587355526a6-etc-swift\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.180054 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-ring-data-devices\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.182218 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-swiftconf\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.182699 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-dispersionconf\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.183372 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-combined-ca-bundle\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.197119 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln6dx\" (UniqueName: \"kubernetes.io/projected/db3f3820-1e34-4ad4-bb89-b587355526a6-kube-api-access-ln6dx\") pod \"swift-ring-rebalance-kx76b\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.273144 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.281254 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:19 crc kubenswrapper[4869]: E0130 11:11:19.281443 4869 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 11:11:19 crc kubenswrapper[4869]: E0130 11:11:19.281468 4869 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 11:11:19 crc kubenswrapper[4869]: E0130 11:11:19.281519 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift podName:7b1a0e46-1fb4-4ab1-9417-cba939546529 nodeName:}" failed. No retries permitted until 2026-01-30 11:11:20.281505013 +0000 UTC m=+1030.831381079 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift") pod "swift-storage-0" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529") : configmap "swift-ring-files" not found Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.621923 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.622562 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" event={"ID":"58fab5d7-620a-47aa-9df0-35e587e79318","Type":"ContainerStarted","Data":"68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8"} Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.622654 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.632386 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.716690 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" podStartSLOduration=2.716675421 podStartE2EDuration="2.716675421s" podCreationTimestamp="2026-01-30 11:11:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:11:19.657373455 +0000 UTC m=+1030.207249521" watchObservedRunningTime="2026-01-30 11:11:19.716675421 +0000 UTC m=+1030.266551487" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.720794 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-kx76b"] Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.787722 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-dispersionconf\") pod \"45307b7b-ca4c-4f46-9a4b-256df49bb080\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.787782 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6m8p\" (UniqueName: \"kubernetes.io/projected/45307b7b-ca4c-4f46-9a4b-256df49bb080-kube-api-access-x6m8p\") pod \"45307b7b-ca4c-4f46-9a4b-256df49bb080\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.787853 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45307b7b-ca4c-4f46-9a4b-256df49bb080-etc-swift\") pod \"45307b7b-ca4c-4f46-9a4b-256df49bb080\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.787881 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-scripts\") pod \"45307b7b-ca4c-4f46-9a4b-256df49bb080\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.787907 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-ring-data-devices\") pod \"45307b7b-ca4c-4f46-9a4b-256df49bb080\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788014 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-combined-ca-bundle\") pod \"45307b7b-ca4c-4f46-9a4b-256df49bb080\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788053 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-swiftconf\") pod \"45307b7b-ca4c-4f46-9a4b-256df49bb080\" (UID: \"45307b7b-ca4c-4f46-9a4b-256df49bb080\") " Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788461 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45307b7b-ca4c-4f46-9a4b-256df49bb080-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "45307b7b-ca4c-4f46-9a4b-256df49bb080" 
(UID: "45307b7b-ca4c-4f46-9a4b-256df49bb080"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788603 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-scripts" (OuterVolumeSpecName: "scripts") pod "45307b7b-ca4c-4f46-9a4b-256df49bb080" (UID: "45307b7b-ca4c-4f46-9a4b-256df49bb080"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788615 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "45307b7b-ca4c-4f46-9a4b-256df49bb080" (UID: "45307b7b-ca4c-4f46-9a4b-256df49bb080"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788944 4869 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45307b7b-ca4c-4f46-9a4b-256df49bb080-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788964 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.788974 4869 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45307b7b-ca4c-4f46-9a4b-256df49bb080-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.793070 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "45307b7b-ca4c-4f46-9a4b-256df49bb080" (UID: "45307b7b-ca4c-4f46-9a4b-256df49bb080"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.793123 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45307b7b-ca4c-4f46-9a4b-256df49bb080" (UID: "45307b7b-ca4c-4f46-9a4b-256df49bb080"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.801926 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "45307b7b-ca4c-4f46-9a4b-256df49bb080" (UID: "45307b7b-ca4c-4f46-9a4b-256df49bb080"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.812403 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45307b7b-ca4c-4f46-9a4b-256df49bb080-kube-api-access-x6m8p" (OuterVolumeSpecName: "kube-api-access-x6m8p") pod "45307b7b-ca4c-4f46-9a4b-256df49bb080" (UID: "45307b7b-ca4c-4f46-9a4b-256df49bb080"). InnerVolumeSpecName "kube-api-access-x6m8p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.890488 4869 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.890529 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6m8p\" (UniqueName: \"kubernetes.io/projected/45307b7b-ca4c-4f46-9a4b-256df49bb080-kube-api-access-x6m8p\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.890541 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:19 crc kubenswrapper[4869]: I0130 11:11:19.890550 4869 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45307b7b-ca4c-4f46-9a4b-256df49bb080-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.144057 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2eda1acf-a7b5-4353-a277-9ae907164424" path="/var/lib/kubelet/pods/2eda1acf-a7b5-4353-a277-9ae907164424/volumes" Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.151583 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.224881 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.295623 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:20 crc kubenswrapper[4869]: E0130 11:11:20.295810 4869 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 11:11:20 crc kubenswrapper[4869]: E0130 11:11:20.295829 4869 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 11:11:20 crc kubenswrapper[4869]: E0130 11:11:20.295873 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift podName:7b1a0e46-1fb4-4ab1-9417-cba939546529 nodeName:}" failed. No retries permitted until 2026-01-30 11:11:22.295857691 +0000 UTC m=+1032.845733757 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift") pod "swift-storage-0" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529") : configmap "swift-ring-files" not found Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.630091 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kx76b" event={"ID":"db3f3820-1e34-4ad4-bb89-b587355526a6","Type":"ContainerStarted","Data":"ea8c2efef7bc769295859afb8e74822e177c9121dddcdff74ceddd199649a5f2"} Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.630119 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-57wlq" Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.675413 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-57wlq"] Jan 30 11:11:20 crc kubenswrapper[4869]: I0130 11:11:20.683696 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-57wlq"] Jan 30 11:11:21 crc kubenswrapper[4869]: I0130 11:11:21.769662 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:11:21 crc kubenswrapper[4869]: I0130 11:11:21.770007 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.141562 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45307b7b-ca4c-4f46-9a4b-256df49bb080" path="/var/lib/kubelet/pods/45307b7b-ca4c-4f46-9a4b-256df49bb080/volumes" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.331848 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:22 crc kubenswrapper[4869]: E0130 11:11:22.332022 4869 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 11:11:22 crc kubenswrapper[4869]: E0130 11:11:22.332038 4869 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 11:11:22 crc kubenswrapper[4869]: E0130 11:11:22.332217 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift podName:7b1a0e46-1fb4-4ab1-9417-cba939546529 nodeName:}" failed. No retries permitted until 2026-01-30 11:11:26.332199647 +0000 UTC m=+1036.882075713 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift") pod "swift-storage-0" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529") : configmap "swift-ring-files" not found Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.696821 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-mlpgd"] Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.698603 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.700908 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.703060 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-mlpgd"] Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.844031 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb572069-acff-4d58-9c1b-24f1582e4465-operator-scripts\") pod \"root-account-create-update-mlpgd\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.844502 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp7zm\" (UniqueName: \"kubernetes.io/projected/cb572069-acff-4d58-9c1b-24f1582e4465-kube-api-access-tp7zm\") pod \"root-account-create-update-mlpgd\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.946298 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb572069-acff-4d58-9c1b-24f1582e4465-operator-scripts\") pod \"root-account-create-update-mlpgd\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.946366 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp7zm\" (UniqueName: \"kubernetes.io/projected/cb572069-acff-4d58-9c1b-24f1582e4465-kube-api-access-tp7zm\") pod \"root-account-create-update-mlpgd\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.948616 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb572069-acff-4d58-9c1b-24f1582e4465-operator-scripts\") pod \"root-account-create-update-mlpgd\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:22 crc kubenswrapper[4869]: I0130 11:11:22.966833 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp7zm\" (UniqueName: \"kubernetes.io/projected/cb572069-acff-4d58-9c1b-24f1582e4465-kube-api-access-tp7zm\") pod \"root-account-create-update-mlpgd\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:23 crc kubenswrapper[4869]: I0130 11:11:23.023396 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:24 crc kubenswrapper[4869]: I0130 11:11:24.126612 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-mlpgd"] Jan 30 11:11:24 crc kubenswrapper[4869]: W0130 11:11:24.140985 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb572069_acff_4d58_9c1b_24f1582e4465.slice/crio-15e4313ff5e9ab1f7f3a99f2b8aec380f4a94c6920c7e5b17a14d762f729ee2d WatchSource:0}: Error finding container 15e4313ff5e9ab1f7f3a99f2b8aec380f4a94c6920c7e5b17a14d762f729ee2d: Status 404 returned error can't find the container with id 15e4313ff5e9ab1f7f3a99f2b8aec380f4a94c6920c7e5b17a14d762f729ee2d Jan 30 11:11:24 crc kubenswrapper[4869]: I0130 11:11:24.671023 4869 generic.go:334] "Generic (PLEG): container finished" podID="cb572069-acff-4d58-9c1b-24f1582e4465" containerID="9050506c48afca5796f54434fc2e6a2df0c465c34f3a76fd4c4a1d174e1a986d" exitCode=0 Jan 30 11:11:24 crc kubenswrapper[4869]: I0130 11:11:24.671107 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mlpgd" event={"ID":"cb572069-acff-4d58-9c1b-24f1582e4465","Type":"ContainerDied","Data":"9050506c48afca5796f54434fc2e6a2df0c465c34f3a76fd4c4a1d174e1a986d"} Jan 30 11:11:24 crc kubenswrapper[4869]: I0130 11:11:24.671137 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mlpgd" event={"ID":"cb572069-acff-4d58-9c1b-24f1582e4465","Type":"ContainerStarted","Data":"15e4313ff5e9ab1f7f3a99f2b8aec380f4a94c6920c7e5b17a14d762f729ee2d"} Jan 30 11:11:24 crc kubenswrapper[4869]: I0130 11:11:24.672547 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kx76b" event={"ID":"db3f3820-1e34-4ad4-bb89-b587355526a6","Type":"ContainerStarted","Data":"104a07f97fb6c7315162653073ff78d2591275c5c90c23f25e1e1792fa663e96"} Jan 30 11:11:24 crc kubenswrapper[4869]: I0130 11:11:24.701269 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-kx76b" podStartSLOduration=2.691559507 podStartE2EDuration="6.701253588s" podCreationTimestamp="2026-01-30 11:11:18 +0000 UTC" firstStartedPulling="2026-01-30 11:11:19.732695456 +0000 UTC m=+1030.282571522" lastFinishedPulling="2026-01-30 11:11:23.742389527 +0000 UTC m=+1034.292265603" observedRunningTime="2026-01-30 11:11:24.697276735 +0000 UTC m=+1035.247152801" watchObservedRunningTime="2026-01-30 11:11:24.701253588 +0000 UTC m=+1035.251129654" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.290165 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-cqxhv"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.291678 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.296702 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cqxhv"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.388586 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4048f70-ac71-4e16-b86c-ea67021c6c58-operator-scripts\") pod \"keystone-db-create-cqxhv\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.388653 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnqlq\" (UniqueName: \"kubernetes.io/projected/e4048f70-ac71-4e16-b86c-ea67021c6c58-kube-api-access-cnqlq\") pod \"keystone-db-create-cqxhv\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.393508 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cd90-account-create-update-ltwwr"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.394723 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.397052 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.402563 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cd90-account-create-update-ltwwr"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.490594 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4048f70-ac71-4e16-b86c-ea67021c6c58-operator-scripts\") pod \"keystone-db-create-cqxhv\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.490648 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnqlq\" (UniqueName: \"kubernetes.io/projected/e4048f70-ac71-4e16-b86c-ea67021c6c58-kube-api-access-cnqlq\") pod \"keystone-db-create-cqxhv\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.490681 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jdcm\" (UniqueName: \"kubernetes.io/projected/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-kube-api-access-4jdcm\") pod \"keystone-cd90-account-create-update-ltwwr\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.490777 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-operator-scripts\") pod \"keystone-cd90-account-create-update-ltwwr\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.491472 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4048f70-ac71-4e16-b86c-ea67021c6c58-operator-scripts\") pod \"keystone-db-create-cqxhv\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.510297 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnqlq\" (UniqueName: \"kubernetes.io/projected/e4048f70-ac71-4e16-b86c-ea67021c6c58-kube-api-access-cnqlq\") pod \"keystone-db-create-cqxhv\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.592058 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-5rjwn"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.592285 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jdcm\" (UniqueName: \"kubernetes.io/projected/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-kube-api-access-4jdcm\") pod \"keystone-cd90-account-create-update-ltwwr\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.600451 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-operator-scripts\") pod \"keystone-cd90-account-create-update-ltwwr\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.601312 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-operator-scripts\") pod \"keystone-cd90-account-create-update-ltwwr\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.605181 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.622296 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.630350 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jdcm\" (UniqueName: \"kubernetes.io/projected/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-kube-api-access-4jdcm\") pod \"keystone-cd90-account-create-update-ltwwr\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.636025 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-5rjwn"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.704357 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfbfv\" (UniqueName: \"kubernetes.io/projected/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-kube-api-access-tfbfv\") pod \"placement-db-create-5rjwn\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.704568 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-operator-scripts\") pod \"placement-db-create-5rjwn\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.712339 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.728890 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-f8b5-account-create-update-d6m9c"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.730292 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.734267 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.761681 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f8b5-account-create-update-d6m9c"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.807502 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-operator-scripts\") pod \"placement-db-create-5rjwn\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.807672 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfbfv\" (UniqueName: \"kubernetes.io/projected/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-kube-api-access-tfbfv\") pod \"placement-db-create-5rjwn\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.807742 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw7rk\" (UniqueName: \"kubernetes.io/projected/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-kube-api-access-fw7rk\") pod \"placement-f8b5-account-create-update-d6m9c\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.807876 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-operator-scripts\") pod \"placement-f8b5-account-create-update-d6m9c\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.809766 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-operator-scripts\") pod \"placement-db-create-5rjwn\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.832084 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfbfv\" (UniqueName: \"kubernetes.io/projected/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-kube-api-access-tfbfv\") pod \"placement-db-create-5rjwn\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.857490 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-gclj6"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.859206 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-gclj6" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.865912 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gclj6"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.909943 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw7rk\" (UniqueName: \"kubernetes.io/projected/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-kube-api-access-fw7rk\") pod \"placement-f8b5-account-create-update-d6m9c\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.910065 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-operator-scripts\") pod \"placement-f8b5-account-create-update-d6m9c\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.910900 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-operator-scripts\") pod \"placement-f8b5-account-create-update-d6m9c\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.923757 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-c00b-account-create-update-thzpf"] Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.926364 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.928892 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw7rk\" (UniqueName: \"kubernetes.io/projected/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-kube-api-access-fw7rk\") pod \"placement-f8b5-account-create-update-d6m9c\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.929455 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 30 11:11:25 crc kubenswrapper[4869]: I0130 11:11:25.937632 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c00b-account-create-update-thzpf"] Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.011573 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9120a9e7-f89e-4d11-8481-d352ffe17419-operator-scripts\") pod \"glance-db-create-gclj6\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " pod="openstack/glance-db-create-gclj6" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.011651 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-operator-scripts\") pod \"glance-c00b-account-create-update-thzpf\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.012054 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7x8d\" (UniqueName: \"kubernetes.io/projected/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-kube-api-access-q7x8d\") pod \"glance-c00b-account-create-update-thzpf\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.012167 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6smw\" (UniqueName: \"kubernetes.io/projected/9120a9e7-f89e-4d11-8481-d352ffe17419-kube-api-access-j6smw\") pod \"glance-db-create-gclj6\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " pod="openstack/glance-db-create-gclj6" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.025660 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.060601 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.113913 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-operator-scripts\") pod \"glance-c00b-account-create-update-thzpf\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.114343 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7x8d\" (UniqueName: \"kubernetes.io/projected/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-kube-api-access-q7x8d\") pod \"glance-c00b-account-create-update-thzpf\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.114371 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6smw\" (UniqueName: \"kubernetes.io/projected/9120a9e7-f89e-4d11-8481-d352ffe17419-kube-api-access-j6smw\") pod \"glance-db-create-gclj6\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " pod="openstack/glance-db-create-gclj6" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.114485 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9120a9e7-f89e-4d11-8481-d352ffe17419-operator-scripts\") pod \"glance-db-create-gclj6\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " pod="openstack/glance-db-create-gclj6" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.115658 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9120a9e7-f89e-4d11-8481-d352ffe17419-operator-scripts\") pod \"glance-db-create-gclj6\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " pod="openstack/glance-db-create-gclj6" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.119938 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-operator-scripts\") pod \"glance-c00b-account-create-update-thzpf\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:26 crc 
kubenswrapper[4869]: I0130 11:11:26.138396 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6smw\" (UniqueName: \"kubernetes.io/projected/9120a9e7-f89e-4d11-8481-d352ffe17419-kube-api-access-j6smw\") pod \"glance-db-create-gclj6\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " pod="openstack/glance-db-create-gclj6" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.143031 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7x8d\" (UniqueName: \"kubernetes.io/projected/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-kube-api-access-q7x8d\") pod \"glance-c00b-account-create-update-thzpf\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.165583 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.196313 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gclj6" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.261109 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.315919 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cqxhv"] Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.317823 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp7zm\" (UniqueName: \"kubernetes.io/projected/cb572069-acff-4d58-9c1b-24f1582e4465-kube-api-access-tp7zm\") pod \"cb572069-acff-4d58-9c1b-24f1582e4465\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.318030 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb572069-acff-4d58-9c1b-24f1582e4465-operator-scripts\") pod \"cb572069-acff-4d58-9c1b-24f1582e4465\" (UID: \"cb572069-acff-4d58-9c1b-24f1582e4465\") " Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.320274 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb572069-acff-4d58-9c1b-24f1582e4465-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cb572069-acff-4d58-9c1b-24f1582e4465" (UID: "cb572069-acff-4d58-9c1b-24f1582e4465"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.323856 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb572069-acff-4d58-9c1b-24f1582e4465-kube-api-access-tp7zm" (OuterVolumeSpecName: "kube-api-access-tp7zm") pod "cb572069-acff-4d58-9c1b-24f1582e4465" (UID: "cb572069-acff-4d58-9c1b-24f1582e4465"). InnerVolumeSpecName "kube-api-access-tp7zm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:26 crc kubenswrapper[4869]: W0130 11:11:26.335859 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4048f70_ac71_4e16_b86c_ea67021c6c58.slice/crio-d1b8775dfd0eec00a78e425d5ab3d4c1dedbd39ef882097b831b4751d3a0de6e WatchSource:0}: Error finding container d1b8775dfd0eec00a78e425d5ab3d4c1dedbd39ef882097b831b4751d3a0de6e: Status 404 returned error can't find the container with id d1b8775dfd0eec00a78e425d5ab3d4c1dedbd39ef882097b831b4751d3a0de6e Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.397842 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cd90-account-create-update-ltwwr"] Jan 30 11:11:26 crc kubenswrapper[4869]: W0130 11:11:26.404519 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7ed99a1_5866_47cc_9c12_8bb4d3ea9a2d.slice/crio-7ad1024e6ecb8895c64f80ed09a52354f79eaadfef27ffd92cab33c2f84162d4 WatchSource:0}: Error finding container 7ad1024e6ecb8895c64f80ed09a52354f79eaadfef27ffd92cab33c2f84162d4: Status 404 returned error can't find the container with id 7ad1024e6ecb8895c64f80ed09a52354f79eaadfef27ffd92cab33c2f84162d4 Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.421535 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.421638 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp7zm\" (UniqueName: \"kubernetes.io/projected/cb572069-acff-4d58-9c1b-24f1582e4465-kube-api-access-tp7zm\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.421650 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb572069-acff-4d58-9c1b-24f1582e4465-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:26 crc kubenswrapper[4869]: E0130 11:11:26.423473 4869 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 11:11:26 crc kubenswrapper[4869]: E0130 11:11:26.423493 4869 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 11:11:26 crc kubenswrapper[4869]: E0130 11:11:26.423569 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift podName:7b1a0e46-1fb4-4ab1-9417-cba939546529 nodeName:}" failed. No retries permitted until 2026-01-30 11:11:34.423550959 +0000 UTC m=+1044.973427025 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift") pod "swift-storage-0" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529") : configmap "swift-ring-files" not found Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.586050 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-5rjwn"] Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.693168 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cqxhv" event={"ID":"e4048f70-ac71-4e16-b86c-ea67021c6c58","Type":"ContainerStarted","Data":"5412689076dd8c8ded30415d779a828810d54b049f71dbf07b75a1f99c3a2172"} Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.693231 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cqxhv" event={"ID":"e4048f70-ac71-4e16-b86c-ea67021c6c58","Type":"ContainerStarted","Data":"d1b8775dfd0eec00a78e425d5ab3d4c1dedbd39ef882097b831b4751d3a0de6e"} Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.694907 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mlpgd" event={"ID":"cb572069-acff-4d58-9c1b-24f1582e4465","Type":"ContainerDied","Data":"15e4313ff5e9ab1f7f3a99f2b8aec380f4a94c6920c7e5b17a14d762f729ee2d"} Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.694941 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-mlpgd" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.694948 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15e4313ff5e9ab1f7f3a99f2b8aec380f4a94c6920c7e5b17a14d762f729ee2d" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.696059 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-5rjwn" event={"ID":"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed","Type":"ContainerStarted","Data":"6f38f86b4a4ad042f2e511f80956c4b51222276ebab0b15d506554a0fd5ea29f"} Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.698385 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cd90-account-create-update-ltwwr" event={"ID":"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d","Type":"ContainerStarted","Data":"3e68c3724a9ab3f614c802fd8e24540459ddd7f937cae24b3480a37e300d0143"} Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.698422 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cd90-account-create-update-ltwwr" event={"ID":"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d","Type":"ContainerStarted","Data":"7ad1024e6ecb8895c64f80ed09a52354f79eaadfef27ffd92cab33c2f84162d4"} Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.717025 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-cqxhv" podStartSLOduration=1.717004299 podStartE2EDuration="1.717004299s" podCreationTimestamp="2026-01-30 11:11:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:11:26.707589451 +0000 UTC m=+1037.257465517" watchObservedRunningTime="2026-01-30 11:11:26.717004299 +0000 UTC m=+1037.266880365" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.730359 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f8b5-account-create-update-d6m9c"] Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.734766 4869 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cd90-account-create-update-ltwwr" podStartSLOduration=1.734746793 podStartE2EDuration="1.734746793s" podCreationTimestamp="2026-01-30 11:11:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:11:26.724009588 +0000 UTC m=+1037.273885654" watchObservedRunningTime="2026-01-30 11:11:26.734746793 +0000 UTC m=+1037.284622859" Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.813504 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gclj6"] Jan 30 11:11:26 crc kubenswrapper[4869]: W0130 11:11:26.821665 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9120a9e7_f89e_4d11_8481_d352ffe17419.slice/crio-7fd1c86edcfb04824118a7708ffe12833b6957fc49e7f9c371d5acea6259aacc WatchSource:0}: Error finding container 7fd1c86edcfb04824118a7708ffe12833b6957fc49e7f9c371d5acea6259aacc: Status 404 returned error can't find the container with id 7fd1c86edcfb04824118a7708ffe12833b6957fc49e7f9c371d5acea6259aacc Jan 30 11:11:26 crc kubenswrapper[4869]: I0130 11:11:26.886200 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c00b-account-create-update-thzpf"] Jan 30 11:11:26 crc kubenswrapper[4869]: W0130 11:11:26.985085 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80814725_2ccf_4f3c_9585_8cdbeb83c2b5.slice/crio-47eae39db09e0842e4cbb8ce2ecfcafc9932426f62ba982ed7ba954f7045a6f5 WatchSource:0}: Error finding container 47eae39db09e0842e4cbb8ce2ecfcafc9932426f62ba982ed7ba954f7045a6f5: Status 404 returned error can't find the container with id 47eae39db09e0842e4cbb8ce2ecfcafc9932426f62ba982ed7ba954f7045a6f5 Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.668946 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.757329 4869 generic.go:334] "Generic (PLEG): container finished" podID="e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d" containerID="3e68c3724a9ab3f614c802fd8e24540459ddd7f937cae24b3480a37e300d0143" exitCode=0 Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.757419 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cd90-account-create-update-ltwwr" event={"ID":"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d","Type":"ContainerDied","Data":"3e68c3724a9ab3f614c802fd8e24540459ddd7f937cae24b3480a37e300d0143"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.759856 4869 generic.go:334] "Generic (PLEG): container finished" podID="e4048f70-ac71-4e16-b86c-ea67021c6c58" containerID="5412689076dd8c8ded30415d779a828810d54b049f71dbf07b75a1f99c3a2172" exitCode=0 Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.760164 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cqxhv" event={"ID":"e4048f70-ac71-4e16-b86c-ea67021c6c58","Type":"ContainerDied","Data":"5412689076dd8c8ded30415d779a828810d54b049f71dbf07b75a1f99c3a2172"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.765487 4869 generic.go:334] "Generic (PLEG): container finished" podID="80814725-2ccf-4f3c-9585-8cdbeb83c2b5" containerID="70a7311ab926d3674361f8b0f2b836b0d2d1f74952207b1f7074f68482cfaa04" exitCode=0 Jan 30 11:11:27 crc kubenswrapper[4869]: 
I0130 11:11:27.765816 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c00b-account-create-update-thzpf" event={"ID":"80814725-2ccf-4f3c-9585-8cdbeb83c2b5","Type":"ContainerDied","Data":"70a7311ab926d3674361f8b0f2b836b0d2d1f74952207b1f7074f68482cfaa04"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.766216 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c00b-account-create-update-thzpf" event={"ID":"80814725-2ccf-4f3c-9585-8cdbeb83c2b5","Type":"ContainerStarted","Data":"47eae39db09e0842e4cbb8ce2ecfcafc9932426f62ba982ed7ba954f7045a6f5"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.769090 4869 generic.go:334] "Generic (PLEG): container finished" podID="9120a9e7-f89e-4d11-8481-d352ffe17419" containerID="f0436d40c14323a1dde1389e6d1c8d2fc2eb8394c2ad72ae9f5ed44b19148575" exitCode=0 Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.769262 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gclj6" event={"ID":"9120a9e7-f89e-4d11-8481-d352ffe17419","Type":"ContainerDied","Data":"f0436d40c14323a1dde1389e6d1c8d2fc2eb8394c2ad72ae9f5ed44b19148575"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.769358 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gclj6" event={"ID":"9120a9e7-f89e-4d11-8481-d352ffe17419","Type":"ContainerStarted","Data":"7fd1c86edcfb04824118a7708ffe12833b6957fc49e7f9c371d5acea6259aacc"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.771012 4869 generic.go:334] "Generic (PLEG): container finished" podID="0caf28f8-d084-42f8-aa84-bb7ed8ec0fed" containerID="de0f4d059a4550335bce61729aa4f9cc7d282f7512964c918bbc226587317020" exitCode=0 Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.771146 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-5rjwn" event={"ID":"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed","Type":"ContainerDied","Data":"de0f4d059a4550335bce61729aa4f9cc7d282f7512964c918bbc226587317020"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.783319 4869 generic.go:334] "Generic (PLEG): container finished" podID="61b770ca-7b59-4e86-a262-b1fb6ff4d0d0" containerID="e5ced6cbb69c7f8d3419ec3e36cf4944ee0d4070b2698a3b94fb7b264d9892dd" exitCode=0 Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.783375 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f8b5-account-create-update-d6m9c" event={"ID":"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0","Type":"ContainerDied","Data":"e5ced6cbb69c7f8d3419ec3e36cf4944ee0d4070b2698a3b94fb7b264d9892dd"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.783405 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f8b5-account-create-update-d6m9c" event={"ID":"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0","Type":"ContainerStarted","Data":"e81d4834cb599a89c7684b3240438d0931dbbd72a9739809cbaa62b07190c43a"} Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.796793 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-ckn8s"] Jan 30 11:11:27 crc kubenswrapper[4869]: I0130 11:11:27.797318 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" podUID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerName="dnsmasq-dns" containerID="cri-o://20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041" gracePeriod=10 Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.257593 4869 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.385207 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljjx8\" (UniqueName: \"kubernetes.io/projected/88fa474b-ed48-4ca3-af15-7217e4a9a6df-kube-api-access-ljjx8\") pod \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.385352 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-dns-svc\") pod \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.385453 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-config\") pod \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\" (UID: \"88fa474b-ed48-4ca3-af15-7217e4a9a6df\") " Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.391450 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88fa474b-ed48-4ca3-af15-7217e4a9a6df-kube-api-access-ljjx8" (OuterVolumeSpecName: "kube-api-access-ljjx8") pod "88fa474b-ed48-4ca3-af15-7217e4a9a6df" (UID: "88fa474b-ed48-4ca3-af15-7217e4a9a6df"). InnerVolumeSpecName "kube-api-access-ljjx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.425614 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "88fa474b-ed48-4ca3-af15-7217e4a9a6df" (UID: "88fa474b-ed48-4ca3-af15-7217e4a9a6df"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.433045 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-config" (OuterVolumeSpecName: "config") pod "88fa474b-ed48-4ca3-af15-7217e4a9a6df" (UID: "88fa474b-ed48-4ca3-af15-7217e4a9a6df"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.487850 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.487892 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fa474b-ed48-4ca3-af15-7217e4a9a6df-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.487904 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljjx8\" (UniqueName: \"kubernetes.io/projected/88fa474b-ed48-4ca3-af15-7217e4a9a6df-kube-api-access-ljjx8\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.793026 4869 generic.go:334] "Generic (PLEG): container finished" podID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerID="20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041" exitCode=0 Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.793084 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.793150 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" event={"ID":"88fa474b-ed48-4ca3-af15-7217e4a9a6df","Type":"ContainerDied","Data":"20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041"} Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.793180 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-ckn8s" event={"ID":"88fa474b-ed48-4ca3-af15-7217e4a9a6df","Type":"ContainerDied","Data":"9251236df24e7a1f9712353dbebd6c495a4a29c874a1232bb777886da462c40c"} Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.793223 4869 scope.go:117] "RemoveContainer" containerID="20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.828606 4869 scope.go:117] "RemoveContainer" containerID="048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.836370 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-ckn8s"] Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.847292 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-ckn8s"] Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.853144 4869 scope.go:117] "RemoveContainer" containerID="20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041" Jan 30 11:11:28 crc kubenswrapper[4869]: E0130 11:11:28.854098 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041\": container with ID starting with 20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041 not found: ID does not exist" containerID="20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.854152 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041"} err="failed to get container status 
\"20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041\": rpc error: code = NotFound desc = could not find container \"20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041\": container with ID starting with 20d721ed432621e0f53ece3b312f30178fba820957276d75c5f0c58c3fad8041 not found: ID does not exist" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.854187 4869 scope.go:117] "RemoveContainer" containerID="048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633" Jan 30 11:11:28 crc kubenswrapper[4869]: E0130 11:11:28.854891 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633\": container with ID starting with 048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633 not found: ID does not exist" containerID="048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633" Jan 30 11:11:28 crc kubenswrapper[4869]: I0130 11:11:28.854924 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633"} err="failed to get container status \"048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633\": rpc error: code = NotFound desc = could not find container \"048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633\": container with ID starting with 048ac12ad4445822a7891e047b3fe07de25e6ce35957da9df8f7cb93db63a633 not found: ID does not exist" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.066313 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-mlpgd"] Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.073343 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-mlpgd"] Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.213138 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.309138 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfbfv\" (UniqueName: \"kubernetes.io/projected/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-kube-api-access-tfbfv\") pod \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.309212 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-operator-scripts\") pod \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\" (UID: \"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.310072 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0caf28f8-d084-42f8-aa84-bb7ed8ec0fed" (UID: "0caf28f8-d084-42f8-aa84-bb7ed8ec0fed"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.312886 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-kube-api-access-tfbfv" (OuterVolumeSpecName: "kube-api-access-tfbfv") pod "0caf28f8-d084-42f8-aa84-bb7ed8ec0fed" (UID: "0caf28f8-d084-42f8-aa84-bb7ed8ec0fed"). InnerVolumeSpecName "kube-api-access-tfbfv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.375489 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gclj6" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.396409 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.401833 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.410797 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfbfv\" (UniqueName: \"kubernetes.io/projected/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-kube-api-access-tfbfv\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.410828 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.417692 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.430508 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.511577 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jdcm\" (UniqueName: \"kubernetes.io/projected/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-kube-api-access-4jdcm\") pod \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.511649 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4048f70-ac71-4e16-b86c-ea67021c6c58-operator-scripts\") pod \"e4048f70-ac71-4e16-b86c-ea67021c6c58\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.511674 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6smw\" (UniqueName: \"kubernetes.io/projected/9120a9e7-f89e-4d11-8481-d352ffe17419-kube-api-access-j6smw\") pod \"9120a9e7-f89e-4d11-8481-d352ffe17419\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.511761 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-operator-scripts\") pod \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\" (UID: \"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.511861 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-operator-scripts\") pod \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512288 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4048f70-ac71-4e16-b86c-ea67021c6c58-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e4048f70-ac71-4e16-b86c-ea67021c6c58" (UID: "e4048f70-ac71-4e16-b86c-ea67021c6c58"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512307 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d" (UID: "e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512387 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw7rk\" (UniqueName: \"kubernetes.io/projected/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-kube-api-access-fw7rk\") pod \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\" (UID: \"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512487 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61b770ca-7b59-4e86-a262-b1fb6ff4d0d0" (UID: "61b770ca-7b59-4e86-a262-b1fb6ff4d0d0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512743 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9120a9e7-f89e-4d11-8481-d352ffe17419-operator-scripts\") pod \"9120a9e7-f89e-4d11-8481-d352ffe17419\" (UID: \"9120a9e7-f89e-4d11-8481-d352ffe17419\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512775 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnqlq\" (UniqueName: \"kubernetes.io/projected/e4048f70-ac71-4e16-b86c-ea67021c6c58-kube-api-access-cnqlq\") pod \"e4048f70-ac71-4e16-b86c-ea67021c6c58\" (UID: \"e4048f70-ac71-4e16-b86c-ea67021c6c58\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512842 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7x8d\" (UniqueName: \"kubernetes.io/projected/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-kube-api-access-q7x8d\") pod \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.512888 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-operator-scripts\") pod \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\" (UID: \"80814725-2ccf-4f3c-9585-8cdbeb83c2b5\") " Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.513377 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9120a9e7-f89e-4d11-8481-d352ffe17419-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9120a9e7-f89e-4d11-8481-d352ffe17419" (UID: "9120a9e7-f89e-4d11-8481-d352ffe17419"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.513457 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4048f70-ac71-4e16-b86c-ea67021c6c58-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.513533 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.513552 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.513636 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "80814725-2ccf-4f3c-9585-8cdbeb83c2b5" (UID: "80814725-2ccf-4f3c-9585-8cdbeb83c2b5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.516216 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-kube-api-access-4jdcm" (OuterVolumeSpecName: "kube-api-access-4jdcm") pod "e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d" (UID: "e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d"). InnerVolumeSpecName "kube-api-access-4jdcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.516771 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4048f70-ac71-4e16-b86c-ea67021c6c58-kube-api-access-cnqlq" (OuterVolumeSpecName: "kube-api-access-cnqlq") pod "e4048f70-ac71-4e16-b86c-ea67021c6c58" (UID: "e4048f70-ac71-4e16-b86c-ea67021c6c58"). InnerVolumeSpecName "kube-api-access-cnqlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.516878 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-kube-api-access-fw7rk" (OuterVolumeSpecName: "kube-api-access-fw7rk") pod "61b770ca-7b59-4e86-a262-b1fb6ff4d0d0" (UID: "61b770ca-7b59-4e86-a262-b1fb6ff4d0d0"). InnerVolumeSpecName "kube-api-access-fw7rk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.517410 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9120a9e7-f89e-4d11-8481-d352ffe17419-kube-api-access-j6smw" (OuterVolumeSpecName: "kube-api-access-j6smw") pod "9120a9e7-f89e-4d11-8481-d352ffe17419" (UID: "9120a9e7-f89e-4d11-8481-d352ffe17419"). InnerVolumeSpecName "kube-api-access-j6smw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.518526 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-kube-api-access-q7x8d" (OuterVolumeSpecName: "kube-api-access-q7x8d") pod "80814725-2ccf-4f3c-9585-8cdbeb83c2b5" (UID: "80814725-2ccf-4f3c-9585-8cdbeb83c2b5"). InnerVolumeSpecName "kube-api-access-q7x8d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.614951 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9120a9e7-f89e-4d11-8481-d352ffe17419-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.615002 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnqlq\" (UniqueName: \"kubernetes.io/projected/e4048f70-ac71-4e16-b86c-ea67021c6c58-kube-api-access-cnqlq\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.615018 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7x8d\" (UniqueName: \"kubernetes.io/projected/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-kube-api-access-q7x8d\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.615030 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80814725-2ccf-4f3c-9585-8cdbeb83c2b5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.615041 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jdcm\" (UniqueName: \"kubernetes.io/projected/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d-kube-api-access-4jdcm\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.615051 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6smw\" (UniqueName: \"kubernetes.io/projected/9120a9e7-f89e-4d11-8481-d352ffe17419-kube-api-access-j6smw\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.615062 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw7rk\" (UniqueName: \"kubernetes.io/projected/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0-kube-api-access-fw7rk\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.801485 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gclj6" event={"ID":"9120a9e7-f89e-4d11-8481-d352ffe17419","Type":"ContainerDied","Data":"7fd1c86edcfb04824118a7708ffe12833b6957fc49e7f9c371d5acea6259aacc"} Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.801787 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fd1c86edcfb04824118a7708ffe12833b6957fc49e7f9c371d5acea6259aacc" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.801530 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gclj6" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.803285 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-5rjwn" event={"ID":"0caf28f8-d084-42f8-aa84-bb7ed8ec0fed","Type":"ContainerDied","Data":"6f38f86b4a4ad042f2e511f80956c4b51222276ebab0b15d506554a0fd5ea29f"} Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.803324 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f38f86b4a4ad042f2e511f80956c4b51222276ebab0b15d506554a0fd5ea29f" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.803402 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-5rjwn" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.804936 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f8b5-account-create-update-d6m9c" event={"ID":"61b770ca-7b59-4e86-a262-b1fb6ff4d0d0","Type":"ContainerDied","Data":"e81d4834cb599a89c7684b3240438d0931dbbd72a9739809cbaa62b07190c43a"} Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.804955 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f8b5-account-create-update-d6m9c" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.804969 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e81d4834cb599a89c7684b3240438d0931dbbd72a9739809cbaa62b07190c43a" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.807962 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cd90-account-create-update-ltwwr" event={"ID":"e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d","Type":"ContainerDied","Data":"7ad1024e6ecb8895c64f80ed09a52354f79eaadfef27ffd92cab33c2f84162d4"} Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.807992 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ad1024e6ecb8895c64f80ed09a52354f79eaadfef27ffd92cab33c2f84162d4" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.808034 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cd90-account-create-update-ltwwr" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.812770 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cqxhv" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.812770 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cqxhv" event={"ID":"e4048f70-ac71-4e16-b86c-ea67021c6c58","Type":"ContainerDied","Data":"d1b8775dfd0eec00a78e425d5ab3d4c1dedbd39ef882097b831b4751d3a0de6e"} Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.813010 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1b8775dfd0eec00a78e425d5ab3d4c1dedbd39ef882097b831b4751d3a0de6e" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.813551 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c00b-account-create-update-thzpf" event={"ID":"80814725-2ccf-4f3c-9585-8cdbeb83c2b5","Type":"ContainerDied","Data":"47eae39db09e0842e4cbb8ce2ecfcafc9932426f62ba982ed7ba954f7045a6f5"} Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.813573 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47eae39db09e0842e4cbb8ce2ecfcafc9932426f62ba982ed7ba954f7045a6f5" Jan 30 11:11:29 crc kubenswrapper[4869]: I0130 11:11:29.813606 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c00b-account-create-update-thzpf" Jan 30 11:11:30 crc kubenswrapper[4869]: I0130 11:11:30.141503 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" path="/var/lib/kubelet/pods/88fa474b-ed48-4ca3-af15-7217e4a9a6df/volumes" Jan 30 11:11:30 crc kubenswrapper[4869]: I0130 11:11:30.142124 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb572069-acff-4d58-9c1b-24f1582e4465" path="/var/lib/kubelet/pods/cb572069-acff-4d58-9c1b-24f1582e4465/volumes" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.148676 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-kcn5x"] Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149287 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerName="dnsmasq-dns" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149308 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerName="dnsmasq-dns" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149325 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0caf28f8-d084-42f8-aa84-bb7ed8ec0fed" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149332 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0caf28f8-d084-42f8-aa84-bb7ed8ec0fed" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149344 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb572069-acff-4d58-9c1b-24f1582e4465" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149354 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb572069-acff-4d58-9c1b-24f1582e4465" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149365 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149372 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149383 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9120a9e7-f89e-4d11-8481-d352ffe17419" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149390 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9120a9e7-f89e-4d11-8481-d352ffe17419" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149404 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4048f70-ac71-4e16-b86c-ea67021c6c58" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149411 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4048f70-ac71-4e16-b86c-ea67021c6c58" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149422 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61b770ca-7b59-4e86-a262-b1fb6ff4d0d0" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149430 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="61b770ca-7b59-4e86-a262-b1fb6ff4d0d0" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149445 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerName="init" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149452 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerName="init" Jan 30 11:11:31 crc kubenswrapper[4869]: E0130 11:11:31.149471 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80814725-2ccf-4f3c-9585-8cdbeb83c2b5" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149477 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="80814725-2ccf-4f3c-9585-8cdbeb83c2b5" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149659 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="80814725-2ccf-4f3c-9585-8cdbeb83c2b5" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149671 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149681 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb572069-acff-4d58-9c1b-24f1582e4465" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149689 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="61b770ca-7b59-4e86-a262-b1fb6ff4d0d0" containerName="mariadb-account-create-update" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149701 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="88fa474b-ed48-4ca3-af15-7217e4a9a6df" containerName="dnsmasq-dns" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149732 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4048f70-ac71-4e16-b86c-ea67021c6c58" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149739 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9120a9e7-f89e-4d11-8481-d352ffe17419" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.149749 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0caf28f8-d084-42f8-aa84-bb7ed8ec0fed" containerName="mariadb-database-create" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.150527 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.153808 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.155103 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jfgbn" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.160381 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-kcn5x"] Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.240333 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-config-data\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.240642 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6nvh\" (UniqueName: \"kubernetes.io/projected/413090e9-1b8c-43a1-9550-150f0baf022f-kube-api-access-x6nvh\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.240744 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-combined-ca-bundle\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.240837 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-db-sync-config-data\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.342303 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-config-data\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.342357 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6nvh\" (UniqueName: \"kubernetes.io/projected/413090e9-1b8c-43a1-9550-150f0baf022f-kube-api-access-x6nvh\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.342401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-combined-ca-bundle\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.342473 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-db-sync-config-data\") pod 
\"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.349157 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-db-sync-config-data\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.349232 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-config-data\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.350351 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-combined-ca-bundle\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.359177 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6nvh\" (UniqueName: \"kubernetes.io/projected/413090e9-1b8c-43a1-9550-150f0baf022f-kube-api-access-x6nvh\") pod \"glance-db-sync-kcn5x\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:31 crc kubenswrapper[4869]: I0130 11:11:31.469254 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-kcn5x" Jan 30 11:11:32 crc kubenswrapper[4869]: I0130 11:11:32.058563 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-kcn5x"] Jan 30 11:11:32 crc kubenswrapper[4869]: W0130 11:11:32.064747 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod413090e9_1b8c_43a1_9550_150f0baf022f.slice/crio-0e9625b65ec8bfa0a885fb33e2b9eb87b5a029de4863badc5c40f751f1eccda0 WatchSource:0}: Error finding container 0e9625b65ec8bfa0a885fb33e2b9eb87b5a029de4863badc5c40f751f1eccda0: Status 404 returned error can't find the container with id 0e9625b65ec8bfa0a885fb33e2b9eb87b5a029de4863badc5c40f751f1eccda0 Jan 30 11:11:32 crc kubenswrapper[4869]: I0130 11:11:32.839871 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kcn5x" event={"ID":"413090e9-1b8c-43a1-9550-150f0baf022f","Type":"ContainerStarted","Data":"0e9625b65ec8bfa0a885fb33e2b9eb87b5a029de4863badc5c40f751f1eccda0"} Jan 30 11:11:33 crc kubenswrapper[4869]: I0130 11:11:33.731334 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 30 11:11:33 crc kubenswrapper[4869]: I0130 11:11:33.851636 4869 generic.go:334] "Generic (PLEG): container finished" podID="db3f3820-1e34-4ad4-bb89-b587355526a6" containerID="104a07f97fb6c7315162653073ff78d2591275c5c90c23f25e1e1792fa663e96" exitCode=0 Jan 30 11:11:33 crc kubenswrapper[4869]: I0130 11:11:33.851680 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kx76b" event={"ID":"db3f3820-1e34-4ad4-bb89-b587355526a6","Type":"ContainerDied","Data":"104a07f97fb6c7315162653073ff78d2591275c5c90c23f25e1e1792fa663e96"} Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 
11:11:34.077663 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-qfh9c"] Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.079348 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.083583 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.103906 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qfh9c"] Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.202695 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqxkh\" (UniqueName: \"kubernetes.io/projected/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-kube-api-access-jqxkh\") pod \"root-account-create-update-qfh9c\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.202764 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-operator-scripts\") pod \"root-account-create-update-qfh9c\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.304260 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqxkh\" (UniqueName: \"kubernetes.io/projected/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-kube-api-access-jqxkh\") pod \"root-account-create-update-qfh9c\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.304346 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-operator-scripts\") pod \"root-account-create-update-qfh9c\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.306379 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-operator-scripts\") pod \"root-account-create-update-qfh9c\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.325327 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqxkh\" (UniqueName: \"kubernetes.io/projected/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-kube-api-access-jqxkh\") pod \"root-account-create-update-qfh9c\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.408215 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.507751 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.515026 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"swift-storage-0\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " pod="openstack/swift-storage-0" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.753230 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 30 11:11:34 crc kubenswrapper[4869]: I0130 11:11:34.940328 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qfh9c"] Jan 30 11:11:34 crc kubenswrapper[4869]: W0130 11:11:34.954935 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b9027f1_3da9_4ee0_a3bd_1041accd8f3b.slice/crio-2255c3efe2f900b54f41d30e0bc91f39a24cc03d1961ae25ab6549013e1c2c83 WatchSource:0}: Error finding container 2255c3efe2f900b54f41d30e0bc91f39a24cc03d1961ae25ab6549013e1c2c83: Status 404 returned error can't find the container with id 2255c3efe2f900b54f41d30e0bc91f39a24cc03d1961ae25ab6549013e1c2c83 Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.256174 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-kx76b" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.428311 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-ring-data-devices\") pod \"db3f3820-1e34-4ad4-bb89-b587355526a6\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.428412 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-scripts\") pod \"db3f3820-1e34-4ad4-bb89-b587355526a6\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.428468 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-combined-ca-bundle\") pod \"db3f3820-1e34-4ad4-bb89-b587355526a6\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.428543 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db3f3820-1e34-4ad4-bb89-b587355526a6-etc-swift\") pod \"db3f3820-1e34-4ad4-bb89-b587355526a6\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.428579 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-swiftconf\") pod \"db3f3820-1e34-4ad4-bb89-b587355526a6\" (UID: 
\"db3f3820-1e34-4ad4-bb89-b587355526a6\") " Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.428625 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-dispersionconf\") pod \"db3f3820-1e34-4ad4-bb89-b587355526a6\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.428665 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln6dx\" (UniqueName: \"kubernetes.io/projected/db3f3820-1e34-4ad4-bb89-b587355526a6-kube-api-access-ln6dx\") pod \"db3f3820-1e34-4ad4-bb89-b587355526a6\" (UID: \"db3f3820-1e34-4ad4-bb89-b587355526a6\") " Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.429619 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "db3f3820-1e34-4ad4-bb89-b587355526a6" (UID: "db3f3820-1e34-4ad4-bb89-b587355526a6"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.430245 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db3f3820-1e34-4ad4-bb89-b587355526a6-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "db3f3820-1e34-4ad4-bb89-b587355526a6" (UID: "db3f3820-1e34-4ad4-bb89-b587355526a6"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.435940 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.446699 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db3f3820-1e34-4ad4-bb89-b587355526a6-kube-api-access-ln6dx" (OuterVolumeSpecName: "kube-api-access-ln6dx") pod "db3f3820-1e34-4ad4-bb89-b587355526a6" (UID: "db3f3820-1e34-4ad4-bb89-b587355526a6"). InnerVolumeSpecName "kube-api-access-ln6dx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.452927 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "db3f3820-1e34-4ad4-bb89-b587355526a6" (UID: "db3f3820-1e34-4ad4-bb89-b587355526a6"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.454423 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-scripts" (OuterVolumeSpecName: "scripts") pod "db3f3820-1e34-4ad4-bb89-b587355526a6" (UID: "db3f3820-1e34-4ad4-bb89-b587355526a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.454995 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db3f3820-1e34-4ad4-bb89-b587355526a6" (UID: "db3f3820-1e34-4ad4-bb89-b587355526a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.459970 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "db3f3820-1e34-4ad4-bb89-b587355526a6" (UID: "db3f3820-1e34-4ad4-bb89-b587355526a6"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:11:35 crc kubenswrapper[4869]: W0130 11:11:35.463914 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b1a0e46_1fb4_4ab1_9417_cba939546529.slice/crio-0e8afe951cff3c900f198b5d6d99a90ac991981ccb9568db72fc68bca99867dc WatchSource:0}: Error finding container 0e8afe951cff3c900f198b5d6d99a90ac991981ccb9568db72fc68bca99867dc: Status 404 returned error can't find the container with id 0e8afe951cff3c900f198b5d6d99a90ac991981ccb9568db72fc68bca99867dc Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.531170 4869 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.531206 4869 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.531218 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln6dx\" (UniqueName: \"kubernetes.io/projected/db3f3820-1e34-4ad4-bb89-b587355526a6-kube-api-access-ln6dx\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.531231 4869 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.531243 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3f3820-1e34-4ad4-bb89-b587355526a6-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.531252 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db3f3820-1e34-4ad4-bb89-b587355526a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.531261 4869 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db3f3820-1e34-4ad4-bb89-b587355526a6-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.922586 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qfh9c" event={"ID":"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b","Type":"ContainerStarted","Data":"0be86ef1bd8fec824fe2385731b416873ec19562cb9c12407f448b3913f77e3e"} Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.922635 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qfh9c" event={"ID":"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b","Type":"ContainerStarted","Data":"2255c3efe2f900b54f41d30e0bc91f39a24cc03d1961ae25ab6549013e1c2c83"} Jan 30 11:11:35 crc kubenswrapper[4869]: 
I0130 11:11:35.929126 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"0e8afe951cff3c900f198b5d6d99a90ac991981ccb9568db72fc68bca99867dc"}
Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.932202 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kx76b" event={"ID":"db3f3820-1e34-4ad4-bb89-b587355526a6","Type":"ContainerDied","Data":"ea8c2efef7bc769295859afb8e74822e177c9121dddcdff74ceddd199649a5f2"}
Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.932244 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea8c2efef7bc769295859afb8e74822e177c9121dddcdff74ceddd199649a5f2"
Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.932308 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-kx76b"
Jan 30 11:11:35 crc kubenswrapper[4869]: I0130 11:11:35.966465 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-qfh9c" podStartSLOduration=1.9664411849999999 podStartE2EDuration="1.966441185s" podCreationTimestamp="2026-01-30 11:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:11:35.940149549 +0000 UTC m=+1046.490025615" watchObservedRunningTime="2026-01-30 11:11:35.966441185 +0000 UTC m=+1046.516317251"
Jan 30 11:11:36 crc kubenswrapper[4869]: I0130 11:11:36.943778 4869 generic.go:334] "Generic (PLEG): container finished" podID="8b9027f1-3da9-4ee0-a3bd-1041accd8f3b" containerID="0be86ef1bd8fec824fe2385731b416873ec19562cb9c12407f448b3913f77e3e" exitCode=0
Jan 30 11:11:36 crc kubenswrapper[4869]: I0130 11:11:36.943908 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qfh9c" event={"ID":"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b","Type":"ContainerDied","Data":"0be86ef1bd8fec824fe2385731b416873ec19562cb9c12407f448b3913f77e3e"}
Jan 30 11:11:38 crc kubenswrapper[4869]: I0130 11:11:38.148762 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-gm6nb" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" probeResult="failure" output=<
Jan 30 11:11:38 crc kubenswrapper[4869]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Jan 30 11:11:38 crc kubenswrapper[4869]: >
Jan 30 11:11:38 crc kubenswrapper[4869]: I0130 11:11:38.163065 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jfzdq"
Jan 30 11:11:39 crc kubenswrapper[4869]: I0130 11:11:39.988683 4869 generic.go:334] "Generic (PLEG): container finished" podID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerID="ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129" exitCode=0
Jan 30 11:11:39 crc kubenswrapper[4869]: I0130 11:11:39.988808 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15b1a123-3831-4fa6-bc52-3f0cf30953f9","Type":"ContainerDied","Data":"ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129"}
Jan 30 11:11:39 crc kubenswrapper[4869]: I0130 11:11:39.991529 4869 generic.go:334] "Generic (PLEG): container finished" podID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerID="55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9" exitCode=0
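The pod_startup_latency_tracker entry above shows how the two durations relate: for root-account-create-update-qfh9c the pull timestamps are the zero time (no image pull), so podStartSLOduration (1.966441185s) equals podStartE2EDuration, the gap between podCreationTimestamp (11:11:34) and watchObservedRunningTime (11:11:35.966441185). For glance-db-sync-kcn5x further down, roughly 14.19s of image pulling is subtracted from the 16.14s E2E figure, leaving about 1.95s. A worked check of that arithmetic; the subtraction rule is an assumption, but it is consistent with the logged values:

    package main

    // Re-derive podStartSLOduration = podStartE2EDuration minus the time
    // spent pulling images, using timestamps copied from the log above.

    import (
        "fmt"
        "time"
    )

    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func mustParse(s string) time.Time {
        t, err := time.Parse(layout, s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        // root-account-create-update-qfh9c: no pull, so SLO == E2E.
        created := mustParse("2026-01-30 11:11:34 +0000 UTC")
        running := mustParse("2026-01-30 11:11:35.966441185 +0000 UTC")
        fmt.Println(running.Sub(created)) // 1.966441185s

        // glance-db-sync-kcn5x (logged later): subtract the pull window.
        firstPull := mustParse("2026-01-30 11:11:32.067109813 +0000 UTC")
        lastPull := mustParse("2026-01-30 11:11:46.258110898 +0000 UTC")
        e2e := 16142654783 * time.Nanosecond // podStartE2EDuration="16.142654783s"
        // Prints 1.951653698s, matching the logged 1.951653708 up to a few
        // nanoseconds of monotonic-clock rounding.
        fmt.Println(e2e - lastPull.Sub(firstPull))
    }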
Jan 30 11:11:39 crc kubenswrapper[4869]: I0130 11:11:39.991581 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d1e4183-a136-428f-9bd8-e857a603da8f","Type":"ContainerDied","Data":"55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9"}
Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.129211 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-gm6nb" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" probeResult="failure" output=<
Jan 30 11:11:43 crc kubenswrapper[4869]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Jan 30 11:11:43 crc kubenswrapper[4869]: >
Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.163490 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jfzdq"
Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.369170 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gm6nb-config-rs96f"]
Jan 30 11:11:43 crc kubenswrapper[4869]: E0130 11:11:43.369618 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db3f3820-1e34-4ad4-bb89-b587355526a6" containerName="swift-ring-rebalance"
Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.369641 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="db3f3820-1e34-4ad4-bb89-b587355526a6" containerName="swift-ring-rebalance"
Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.369885 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="db3f3820-1e34-4ad4-bb89-b587355526a6" containerName="swift-ring-rebalance"
Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.370563 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.372880 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.377842 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gm6nb-config-rs96f"] Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.498974 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.502323 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vkpz\" (UniqueName: \"kubernetes.io/projected/5a1781d2-406c-4775-b67a-116815cde995-kube-api-access-7vkpz\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.502386 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run-ovn\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.502459 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-additional-scripts\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.502498 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-scripts\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.502532 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-log-ovn\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.603680 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-additional-scripts\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.603775 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-scripts\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.603803 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-log-ovn\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.603833 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.603893 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vkpz\" (UniqueName: \"kubernetes.io/projected/5a1781d2-406c-4775-b67a-116815cde995-kube-api-access-7vkpz\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.603920 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run-ovn\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.604230 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run-ovn\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.604231 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-log-ovn\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.604325 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.604749 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-additional-scripts\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.606142 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-scripts\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.622962 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vkpz\" (UniqueName: \"kubernetes.io/projected/5a1781d2-406c-4775-b67a-116815cde995-kube-api-access-7vkpz\") pod \"ovn-controller-gm6nb-config-rs96f\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:43 crc kubenswrapper[4869]: I0130 11:11:43.726543 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:45 crc kubenswrapper[4869]: I0130 11:11:45.869751 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.040037 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qfh9c" event={"ID":"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b","Type":"ContainerDied","Data":"2255c3efe2f900b54f41d30e0bc91f39a24cc03d1961ae25ab6549013e1c2c83"} Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.040395 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2255c3efe2f900b54f41d30e0bc91f39a24cc03d1961ae25ab6549013e1c2c83" Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.040050 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qfh9c" Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.050299 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqxkh\" (UniqueName: \"kubernetes.io/projected/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-kube-api-access-jqxkh\") pod \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.050441 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-operator-scripts\") pod \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\" (UID: \"8b9027f1-3da9-4ee0-a3bd-1041accd8f3b\") " Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.051273 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b9027f1-3da9-4ee0-a3bd-1041accd8f3b" (UID: "8b9027f1-3da9-4ee0-a3bd-1041accd8f3b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.058849 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-kube-api-access-jqxkh" (OuterVolumeSpecName: "kube-api-access-jqxkh") pod "8b9027f1-3da9-4ee0-a3bd-1041accd8f3b" (UID: "8b9027f1-3da9-4ee0-a3bd-1041accd8f3b"). InnerVolumeSpecName "kube-api-access-jqxkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.151928 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.152013 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqxkh\" (UniqueName: \"kubernetes.io/projected/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b-kube-api-access-jqxkh\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:46 crc kubenswrapper[4869]: I0130 11:11:46.707213 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gm6nb-config-rs96f"] Jan 30 11:11:46 crc kubenswrapper[4869]: W0130 11:11:46.709529 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a1781d2_406c_4775_b67a_116815cde995.slice/crio-6332c8af0b5e35c56337a6b85530a5d34e0ccea2664bffd88f7e17266663571d WatchSource:0}: Error finding container 6332c8af0b5e35c56337a6b85530a5d34e0ccea2664bffd88f7e17266663571d: Status 404 returned error can't find the container with id 6332c8af0b5e35c56337a6b85530a5d34e0ccea2664bffd88f7e17266663571d Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.056627 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15b1a123-3831-4fa6-bc52-3f0cf30953f9","Type":"ContainerStarted","Data":"0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e"} Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.057106 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.059341 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d1e4183-a136-428f-9bd8-e857a603da8f","Type":"ContainerStarted","Data":"3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a"} Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.059580 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.062421 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gm6nb-config-rs96f" event={"ID":"5a1781d2-406c-4775-b67a-116815cde995","Type":"ContainerStarted","Data":"6332c8af0b5e35c56337a6b85530a5d34e0ccea2664bffd88f7e17266663571d"} Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.064009 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kcn5x" event={"ID":"413090e9-1b8c-43a1-9550-150f0baf022f","Type":"ContainerStarted","Data":"1a899ce54a669e416e3efafc56286fab9b4ed702e1c2a4e1d21f491569dad96b"} Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.066328 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"f485c0dfd027365de561b952055dc9630a3eb742e8b7b5fbc78313ec86a5772e"} Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.066376 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"e8224a2745bbe075dafb160da11e1dafa60447cd322ab3064698b3cb694f996d"} Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 
11:11:47.078830 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=57.674718467 podStartE2EDuration="1m6.078813502s" podCreationTimestamp="2026-01-30 11:10:41 +0000 UTC" firstStartedPulling="2026-01-30 11:10:56.87176981 +0000 UTC m=+1007.421645876" lastFinishedPulling="2026-01-30 11:11:05.275864845 +0000 UTC m=+1015.825740911" observedRunningTime="2026-01-30 11:11:47.078460672 +0000 UTC m=+1057.628336748" watchObservedRunningTime="2026-01-30 11:11:47.078813502 +0000 UTC m=+1057.628689568" Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.113685 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=57.461213497 podStartE2EDuration="1m6.113664381s" podCreationTimestamp="2026-01-30 11:10:41 +0000 UTC" firstStartedPulling="2026-01-30 11:10:56.526586259 +0000 UTC m=+1007.076462325" lastFinishedPulling="2026-01-30 11:11:05.179037143 +0000 UTC m=+1015.728913209" observedRunningTime="2026-01-30 11:11:47.106390384 +0000 UTC m=+1057.656266450" watchObservedRunningTime="2026-01-30 11:11:47.113664381 +0000 UTC m=+1057.663540447" Jan 30 11:11:47 crc kubenswrapper[4869]: I0130 11:11:47.142676 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-kcn5x" podStartSLOduration=1.951653708 podStartE2EDuration="16.142654783s" podCreationTimestamp="2026-01-30 11:11:31 +0000 UTC" firstStartedPulling="2026-01-30 11:11:32.067109813 +0000 UTC m=+1042.616985879" lastFinishedPulling="2026-01-30 11:11:46.258110898 +0000 UTC m=+1056.807986954" observedRunningTime="2026-01-30 11:11:47.135467469 +0000 UTC m=+1057.685343535" watchObservedRunningTime="2026-01-30 11:11:47.142654783 +0000 UTC m=+1057.692530849" Jan 30 11:11:48 crc kubenswrapper[4869]: I0130 11:11:48.081267 4869 generic.go:334] "Generic (PLEG): container finished" podID="5a1781d2-406c-4775-b67a-116815cde995" containerID="ee92fad2a749ee8d2b76ed5e3e331a021f8ec879fcbe4e3a84b98b8a4e28bf2c" exitCode=0 Jan 30 11:11:48 crc kubenswrapper[4869]: I0130 11:11:48.081596 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gm6nb-config-rs96f" event={"ID":"5a1781d2-406c-4775-b67a-116815cde995","Type":"ContainerDied","Data":"ee92fad2a749ee8d2b76ed5e3e331a021f8ec879fcbe4e3a84b98b8a4e28bf2c"} Jan 30 11:11:48 crc kubenswrapper[4869]: I0130 11:11:48.085363 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"9062f1d85cf44cdab858a6bee6da1f5afe762771c26d3b6e53a3b618a6533e24"} Jan 30 11:11:48 crc kubenswrapper[4869]: I0130 11:11:48.085404 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"59365e20746e6cb7518cf9197ed2d89e60bb59adac7d8138ec6dae90296333f2"} Jan 30 11:11:48 crc kubenswrapper[4869]: I0130 11:11:48.171774 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-gm6nb" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.094981 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"124390ed6584fb765f5eaac9acb1121748399b48696c737116ce08db4dd7bfb0"} Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.356254 4869 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.417861 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run\") pod \"5a1781d2-406c-4775-b67a-116815cde995\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.417974 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-scripts\") pod \"5a1781d2-406c-4775-b67a-116815cde995\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.418068 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vkpz\" (UniqueName: \"kubernetes.io/projected/5a1781d2-406c-4775-b67a-116815cde995-kube-api-access-7vkpz\") pod \"5a1781d2-406c-4775-b67a-116815cde995\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.418114 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-additional-scripts\") pod \"5a1781d2-406c-4775-b67a-116815cde995\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.418160 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run-ovn\") pod \"5a1781d2-406c-4775-b67a-116815cde995\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.418224 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-log-ovn\") pod \"5a1781d2-406c-4775-b67a-116815cde995\" (UID: \"5a1781d2-406c-4775-b67a-116815cde995\") " Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.418346 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run" (OuterVolumeSpecName: "var-run") pod "5a1781d2-406c-4775-b67a-116815cde995" (UID: "5a1781d2-406c-4775-b67a-116815cde995"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.418668 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "5a1781d2-406c-4775-b67a-116815cde995" (UID: "5a1781d2-406c-4775-b67a-116815cde995"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.418812 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "5a1781d2-406c-4775-b67a-116815cde995" (UID: "5a1781d2-406c-4775-b67a-116815cde995"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.419352 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "5a1781d2-406c-4775-b67a-116815cde995" (UID: "5a1781d2-406c-4775-b67a-116815cde995"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.419518 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-scripts" (OuterVolumeSpecName: "scripts") pod "5a1781d2-406c-4775-b67a-116815cde995" (UID: "5a1781d2-406c-4775-b67a-116815cde995"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.419563 4869 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.419586 4869 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.419600 4869 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.419616 4869 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a1781d2-406c-4775-b67a-116815cde995-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.424375 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a1781d2-406c-4775-b67a-116815cde995-kube-api-access-7vkpz" (OuterVolumeSpecName: "kube-api-access-7vkpz") pod "5a1781d2-406c-4775-b67a-116815cde995" (UID: "5a1781d2-406c-4775-b67a-116815cde995"). InnerVolumeSpecName "kube-api-access-7vkpz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.521483 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a1781d2-406c-4775-b67a-116815cde995-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:49 crc kubenswrapper[4869]: I0130 11:11:49.521552 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vkpz\" (UniqueName: \"kubernetes.io/projected/5a1781d2-406c-4775-b67a-116815cde995-kube-api-access-7vkpz\") on node \"crc\" DevicePath \"\"" Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.119823 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gm6nb-config-rs96f" event={"ID":"5a1781d2-406c-4775-b67a-116815cde995","Type":"ContainerDied","Data":"6332c8af0b5e35c56337a6b85530a5d34e0ccea2664bffd88f7e17266663571d"} Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.119868 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6332c8af0b5e35c56337a6b85530a5d34e0ccea2664bffd88f7e17266663571d" Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.119901 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gm6nb-config-rs96f" Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.143471 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"20275096c286a0ae8845e55c14e269921083b740de5bce384f7ce1e846435e9b"} Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.143510 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"b439885a7cc9ae304f7f36de7bcb53459f7f5b2bd2fc41ec4f6c9ebd8922553f"} Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.143523 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"e88f5b9e87740566349e22c0ad0a51c60c75ada4b7cc83a0dac45fffd0379d49"} Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.457183 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gm6nb-config-rs96f"] Jan 30 11:11:50 crc kubenswrapper[4869]: I0130 11:11:50.463616 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-gm6nb-config-rs96f"] Jan 30 11:11:51 crc kubenswrapper[4869]: I0130 11:11:51.769886 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:11:51 crc kubenswrapper[4869]: I0130 11:11:51.770222 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:11:51 crc kubenswrapper[4869]: I0130 11:11:51.770266 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:11:51 crc 
kubenswrapper[4869]: I0130 11:11:51.771063 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4ad08bd30e3f392ec90da3d9d390933399838d7e5f9e5bf41ffe126d804a3058"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:11:51 crc kubenswrapper[4869]: I0130 11:11:51.771142 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://4ad08bd30e3f392ec90da3d9d390933399838d7e5f9e5bf41ffe126d804a3058" gracePeriod=600 Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.151214 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a1781d2-406c-4775-b67a-116815cde995" path="/var/lib/kubelet/pods/5a1781d2-406c-4775-b67a-116815cde995/volumes" Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.161535 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"f8504fd0ff5794faf6bacd1fef665e7a9cd6fedeedff24e1b282e17c56837bea"} Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.161582 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"97e9034f3bb0c1adcd5ddc73729cfb8a5551a31bba376f2f3e02026b27ed1f80"} Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.161595 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"6f9aa9147f317463724ec1dff3a40f0f2085d959d4963346ddc72ee9e85fd348"} Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.161607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"5ef059d18651368542240591ca6f5fe4c03b5e8aa1d605e33f266d5c65f87088"} Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.168196 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="4ad08bd30e3f392ec90da3d9d390933399838d7e5f9e5bf41ffe126d804a3058" exitCode=0 Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.168364 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"4ad08bd30e3f392ec90da3d9d390933399838d7e5f9e5bf41ffe126d804a3058"} Jan 30 11:11:52 crc kubenswrapper[4869]: I0130 11:11:52.168452 4869 scope.go:117] "RemoveContainer" containerID="c5808a41780bbab079aa25dfbf774b2278de2a0be2251b6799239067a10cf14f" Jan 30 11:11:53 crc kubenswrapper[4869]: I0130 11:11:53.178534 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"2d30cb107250bb27f981051333540bce0f94a645c35535aa9330fd41a7dff2ba"} Jan 30 11:11:53 crc kubenswrapper[4869]: I0130 11:11:53.186171 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"13ba9f6f7912e14e6780c387ebc1a2663d74dda7b49e064fb9abd88ab5e57f99"} Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.219501 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"e6f93331ee688f8b3d08b68521bc6870dc9ec3fe42c2459935fb484bbb47b43b"} Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.219957 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerStarted","Data":"65dd5e75cb2c7b06492f5521d624b519679d57118d8f7a7a48edaca957d584a1"} Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.261478 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=24.574594337 podStartE2EDuration="40.261455489s" podCreationTimestamp="2026-01-30 11:11:17 +0000 UTC" firstStartedPulling="2026-01-30 11:11:35.466192336 +0000 UTC m=+1046.016068402" lastFinishedPulling="2026-01-30 11:11:51.153053488 +0000 UTC m=+1061.702929554" observedRunningTime="2026-01-30 11:11:57.255765477 +0000 UTC m=+1067.805641563" watchObservedRunningTime="2026-01-30 11:11:57.261455489 +0000 UTC m=+1067.811331555" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.541524 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-bnwdm"] Jan 30 11:11:57 crc kubenswrapper[4869]: E0130 11:11:57.542205 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b9027f1-3da9-4ee0-a3bd-1041accd8f3b" containerName="mariadb-account-create-update" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.542225 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b9027f1-3da9-4ee0-a3bd-1041accd8f3b" containerName="mariadb-account-create-update" Jan 30 11:11:57 crc kubenswrapper[4869]: E0130 11:11:57.542257 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a1781d2-406c-4775-b67a-116815cde995" containerName="ovn-config" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.542265 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a1781d2-406c-4775-b67a-116815cde995" containerName="ovn-config" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.542420 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b9027f1-3da9-4ee0-a3bd-1041accd8f3b" containerName="mariadb-account-create-update" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.542444 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a1781d2-406c-4775-b67a-116815cde995" containerName="ovn-config" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.543266 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.545369 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.556574 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-bnwdm"] Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.653059 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-config\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.653108 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.653235 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.653380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.653446 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.653483 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5vh5\" (UniqueName: \"kubernetes.io/projected/83bf5b74-32e0-4431-bea8-94ea95616a23-kube-api-access-m5vh5\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.755037 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.755126 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: 
\"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.755156 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5vh5\" (UniqueName: \"kubernetes.io/projected/83bf5b74-32e0-4431-bea8-94ea95616a23-kube-api-access-m5vh5\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.755181 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-config\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.755208 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.755240 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.756324 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.756414 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.756427 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.756653 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.756653 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-config\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: 
I0130 11:11:57.776700 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5vh5\" (UniqueName: \"kubernetes.io/projected/83bf5b74-32e0-4431-bea8-94ea95616a23-kube-api-access-m5vh5\") pod \"dnsmasq-dns-5c79d794d7-bnwdm\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:57 crc kubenswrapper[4869]: I0130 11:11:57.860884 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:11:58 crc kubenswrapper[4869]: I0130 11:11:58.300937 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-bnwdm"] Jan 30 11:11:59 crc kubenswrapper[4869]: I0130 11:11:59.235611 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" event={"ID":"83bf5b74-32e0-4431-bea8-94ea95616a23","Type":"ContainerStarted","Data":"3d2e483315346950bbff42d4c903c9480aa5f5b8002e25f5fa3b84522b01c715"} Jan 30 11:12:00 crc kubenswrapper[4869]: I0130 11:12:00.244628 4869 generic.go:334] "Generic (PLEG): container finished" podID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerID="2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d" exitCode=0 Jan 30 11:12:00 crc kubenswrapper[4869]: I0130 11:12:00.244693 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" event={"ID":"83bf5b74-32e0-4431-bea8-94ea95616a23","Type":"ContainerDied","Data":"2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d"} Jan 30 11:12:01 crc kubenswrapper[4869]: I0130 11:12:01.253868 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" event={"ID":"83bf5b74-32e0-4431-bea8-94ea95616a23","Type":"ContainerStarted","Data":"9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47"} Jan 30 11:12:01 crc kubenswrapper[4869]: I0130 11:12:01.254452 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:12:01 crc kubenswrapper[4869]: I0130 11:12:01.282112 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" podStartSLOduration=4.282081384 podStartE2EDuration="4.282081384s" podCreationTimestamp="2026-01-30 11:11:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:01.275036694 +0000 UTC m=+1071.824912750" watchObservedRunningTime="2026-01-30 11:12:01.282081384 +0000 UTC m=+1071.831957460" Jan 30 11:12:02 crc kubenswrapper[4869]: I0130 11:12:02.262132 4869 generic.go:334] "Generic (PLEG): container finished" podID="413090e9-1b8c-43a1-9550-150f0baf022f" containerID="1a899ce54a669e416e3efafc56286fab9b4ed702e1c2a4e1d21f491569dad96b" exitCode=0 Jan 30 11:12:02 crc kubenswrapper[4869]: I0130 11:12:02.262222 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kcn5x" event={"ID":"413090e9-1b8c-43a1-9550-150f0baf022f","Type":"ContainerDied","Data":"1a899ce54a669e416e3efafc56286fab9b4ed702e1c2a4e1d21f491569dad96b"} Jan 30 11:12:02 crc kubenswrapper[4869]: I0130 11:12:02.715978 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 30 11:12:02 crc kubenswrapper[4869]: I0130 11:12:02.793883 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" 
Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.010409 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-lvfzc"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.011668 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.067575 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lvfzc"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.135251 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-jqsls"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.136595 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.151454 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d55e229e-1e13-4cf5-9b86-32122704fa72-operator-scripts\") pod \"cinder-db-create-lvfzc\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.151504 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tfk4\" (UniqueName: \"kubernetes.io/projected/d55e229e-1e13-4cf5-9b86-32122704fa72-kube-api-access-4tfk4\") pod \"cinder-db-create-lvfzc\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.158942 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-70de-account-create-update-qkqf9"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.165121 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.169457 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.176768 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jqsls"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.193434 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-70de-account-create-update-qkqf9"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.243509 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-7037-account-create-update-46qmh"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.244847 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.251317 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-7037-account-create-update-46qmh"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.251666 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.252770 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1293c111-bc92-47ac-aaf0-ae153a289832-operator-scripts\") pod \"barbican-db-create-jqsls\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.254280 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d55e229e-1e13-4cf5-9b86-32122704fa72-operator-scripts\") pod \"cinder-db-create-lvfzc\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.255008 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d55e229e-1e13-4cf5-9b86-32122704fa72-operator-scripts\") pod \"cinder-db-create-lvfzc\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.255089 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tfk4\" (UniqueName: \"kubernetes.io/projected/d55e229e-1e13-4cf5-9b86-32122704fa72-kube-api-access-4tfk4\") pod \"cinder-db-create-lvfzc\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.256322 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dndn\" (UniqueName: \"kubernetes.io/projected/1293c111-bc92-47ac-aaf0-ae153a289832-kube-api-access-6dndn\") pod \"barbican-db-create-jqsls\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.278445 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tfk4\" (UniqueName: \"kubernetes.io/projected/d55e229e-1e13-4cf5-9b86-32122704fa72-kube-api-access-4tfk4\") pod \"cinder-db-create-lvfzc\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.329840 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.357495 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dndn\" (UniqueName: \"kubernetes.io/projected/1293c111-bc92-47ac-aaf0-ae153a289832-kube-api-access-6dndn\") pod \"barbican-db-create-jqsls\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.357550 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1293c111-bc92-47ac-aaf0-ae153a289832-operator-scripts\") pod \"barbican-db-create-jqsls\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.357583 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wc4k\" (UniqueName: \"kubernetes.io/projected/b1c9c7cc-73fd-41bb-8418-126303047e6a-kube-api-access-2wc4k\") pod \"barbican-70de-account-create-update-qkqf9\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.357608 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c9c7cc-73fd-41bb-8418-126303047e6a-operator-scripts\") pod \"barbican-70de-account-create-update-qkqf9\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.357683 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-operator-scripts\") pod \"cinder-7037-account-create-update-46qmh\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.357737 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m87cj\" (UniqueName: \"kubernetes.io/projected/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-kube-api-access-m87cj\") pod \"cinder-7037-account-create-update-46qmh\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.358568 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1293c111-bc92-47ac-aaf0-ae153a289832-operator-scripts\") pod \"barbican-db-create-jqsls\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.390300 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dndn\" (UniqueName: \"kubernetes.io/projected/1293c111-bc92-47ac-aaf0-ae153a289832-kube-api-access-6dndn\") pod \"barbican-db-create-jqsls\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.460789 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-operator-scripts\") pod \"cinder-7037-account-create-update-46qmh\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.460852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m87cj\" (UniqueName: \"kubernetes.io/projected/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-kube-api-access-m87cj\") pod \"cinder-7037-account-create-update-46qmh\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.460950 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c9c7cc-73fd-41bb-8418-126303047e6a-operator-scripts\") pod \"barbican-70de-account-create-update-qkqf9\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.460972 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wc4k\" (UniqueName: \"kubernetes.io/projected/b1c9c7cc-73fd-41bb-8418-126303047e6a-kube-api-access-2wc4k\") pod \"barbican-70de-account-create-update-qkqf9\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.462231 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-operator-scripts\") pod \"cinder-7037-account-create-update-46qmh\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.462755 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c9c7cc-73fd-41bb-8418-126303047e6a-operator-scripts\") pod \"barbican-70de-account-create-update-qkqf9\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.472109 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.502230 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wc4k\" (UniqueName: \"kubernetes.io/projected/b1c9c7cc-73fd-41bb-8418-126303047e6a-kube-api-access-2wc4k\") pod \"barbican-70de-account-create-update-qkqf9\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.513981 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.530392 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-k8rcl"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.531465 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.531858 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m87cj\" (UniqueName: \"kubernetes.io/projected/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-kube-api-access-m87cj\") pod \"cinder-7037-account-create-update-46qmh\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.536961 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.537655 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.537838 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kldpt" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.541658 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.557551 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-k8rcl"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.571097 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.586973 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-6l5cw"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.589060 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.652772 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8f65-account-create-update-mz5hf"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.655957 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.661477 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.667568 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-combined-ca-bundle\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.668586 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-config-data\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.668728 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg5hv\" (UniqueName: \"kubernetes.io/projected/1b6486d6-add2-4abe-8ccb-35517810f949-kube-api-access-gg5hv\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.668818 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-6l5cw"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.710796 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8f65-account-create-update-mz5hf"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.769886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-combined-ca-bundle\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.769974 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-config-data\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.770054 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ckz6\" (UniqueName: \"kubernetes.io/projected/35a2d1cd-798d-4e72-a4fc-b0609e30f714-kube-api-access-6ckz6\") pod \"neutron-8f65-account-create-update-mz5hf\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.770083 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-operator-scripts\") pod \"neutron-db-create-6l5cw\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.770137 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-wd7zh\" (UniqueName: \"kubernetes.io/projected/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-kube-api-access-wd7zh\") pod \"neutron-db-create-6l5cw\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.770173 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg5hv\" (UniqueName: \"kubernetes.io/projected/1b6486d6-add2-4abe-8ccb-35517810f949-kube-api-access-gg5hv\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.770232 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35a2d1cd-798d-4e72-a4fc-b0609e30f714-operator-scripts\") pod \"neutron-8f65-account-create-update-mz5hf\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.787897 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-combined-ca-bundle\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.791658 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-config-data\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.806362 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg5hv\" (UniqueName: \"kubernetes.io/projected/1b6486d6-add2-4abe-8ccb-35517810f949-kube-api-access-gg5hv\") pod \"keystone-db-sync-k8rcl\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.855942 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.871958 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ckz6\" (UniqueName: \"kubernetes.io/projected/35a2d1cd-798d-4e72-a4fc-b0609e30f714-kube-api-access-6ckz6\") pod \"neutron-8f65-account-create-update-mz5hf\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.872013 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-operator-scripts\") pod \"neutron-db-create-6l5cw\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.872075 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd7zh\" (UniqueName: \"kubernetes.io/projected/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-kube-api-access-wd7zh\") pod \"neutron-db-create-6l5cw\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.872149 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35a2d1cd-798d-4e72-a4fc-b0609e30f714-operator-scripts\") pod \"neutron-8f65-account-create-update-mz5hf\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.873235 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-operator-scripts\") pod \"neutron-db-create-6l5cw\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.874114 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35a2d1cd-798d-4e72-a4fc-b0609e30f714-operator-scripts\") pod \"neutron-8f65-account-create-update-mz5hf\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.889671 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd7zh\" (UniqueName: \"kubernetes.io/projected/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-kube-api-access-wd7zh\") pod \"neutron-db-create-6l5cw\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.889671 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ckz6\" (UniqueName: \"kubernetes.io/projected/35a2d1cd-798d-4e72-a4fc-b0609e30f714-kube-api-access-6ckz6\") pod \"neutron-8f65-account-create-update-mz5hf\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.922837 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-kcn5x" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.944656 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lvfzc"] Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.950184 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:03 crc kubenswrapper[4869]: I0130 11:12:03.994748 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.074542 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-combined-ca-bundle\") pod \"413090e9-1b8c-43a1-9550-150f0baf022f\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.074591 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-db-sync-config-data\") pod \"413090e9-1b8c-43a1-9550-150f0baf022f\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.074622 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6nvh\" (UniqueName: \"kubernetes.io/projected/413090e9-1b8c-43a1-9550-150f0baf022f-kube-api-access-x6nvh\") pod \"413090e9-1b8c-43a1-9550-150f0baf022f\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.074719 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-config-data\") pod \"413090e9-1b8c-43a1-9550-150f0baf022f\" (UID: \"413090e9-1b8c-43a1-9550-150f0baf022f\") " Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.085997 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/413090e9-1b8c-43a1-9550-150f0baf022f-kube-api-access-x6nvh" (OuterVolumeSpecName: "kube-api-access-x6nvh") pod "413090e9-1b8c-43a1-9550-150f0baf022f" (UID: "413090e9-1b8c-43a1-9550-150f0baf022f"). InnerVolumeSpecName "kube-api-access-x6nvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.086630 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "413090e9-1b8c-43a1-9550-150f0baf022f" (UID: "413090e9-1b8c-43a1-9550-150f0baf022f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.113482 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "413090e9-1b8c-43a1-9550-150f0baf022f" (UID: "413090e9-1b8c-43a1-9550-150f0baf022f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.151773 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-config-data" (OuterVolumeSpecName: "config-data") pod "413090e9-1b8c-43a1-9550-150f0baf022f" (UID: "413090e9-1b8c-43a1-9550-150f0baf022f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.155456 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jqsls"] Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.176642 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.176670 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.176685 4869 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/413090e9-1b8c-43a1-9550-150f0baf022f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.176700 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6nvh\" (UniqueName: \"kubernetes.io/projected/413090e9-1b8c-43a1-9550-150f0baf022f-kube-api-access-x6nvh\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.241392 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-7037-account-create-update-46qmh"] Jan 30 11:12:04 crc kubenswrapper[4869]: W0130 11:12:04.245911 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb7f72ff_fc2f_48ee_9fd9_ed0b2e75295b.slice/crio-5ec23fcbba74e32da7588f9f57492e531174a13341f7a00f4958ac2d24649d5c WatchSource:0}: Error finding container 5ec23fcbba74e32da7588f9f57492e531174a13341f7a00f4958ac2d24649d5c: Status 404 returned error can't find the container with id 5ec23fcbba74e32da7588f9f57492e531174a13341f7a00f4958ac2d24649d5c Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.262648 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-70de-account-create-update-qkqf9"] Jan 30 11:12:04 crc kubenswrapper[4869]: W0130 11:12:04.266258 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1c9c7cc_73fd_41bb_8418_126303047e6a.slice/crio-91798be69dad5040bf11e8206c0b12c6b550e08d212192987196b9fadbcab41f WatchSource:0}: Error finding container 91798be69dad5040bf11e8206c0b12c6b550e08d212192987196b9fadbcab41f: Status 404 returned error can't find the container with id 91798be69dad5040bf11e8206c0b12c6b550e08d212192987196b9fadbcab41f Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.316220 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-70de-account-create-update-qkqf9" event={"ID":"b1c9c7cc-73fd-41bb-8418-126303047e6a","Type":"ContainerStarted","Data":"91798be69dad5040bf11e8206c0b12c6b550e08d212192987196b9fadbcab41f"} Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.318259 
4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kcn5x" event={"ID":"413090e9-1b8c-43a1-9550-150f0baf022f","Type":"ContainerDied","Data":"0e9625b65ec8bfa0a885fb33e2b9eb87b5a029de4863badc5c40f751f1eccda0"} Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.318290 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-kcn5x" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.318293 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e9625b65ec8bfa0a885fb33e2b9eb87b5a029de4863badc5c40f751f1eccda0" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.361606 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jqsls" event={"ID":"1293c111-bc92-47ac-aaf0-ae153a289832","Type":"ContainerStarted","Data":"ab847d4cb0929adad3dd2666a66e093f420d3ccbce8084d0b8ea46360a8286ed"} Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.363344 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lvfzc" event={"ID":"d55e229e-1e13-4cf5-9b86-32122704fa72","Type":"ContainerStarted","Data":"29dfd9cf1cbd1da829ec5f39674e87b03d77f8127500a99c65f6230faca4edba"} Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.363396 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lvfzc" event={"ID":"d55e229e-1e13-4cf5-9b86-32122704fa72","Type":"ContainerStarted","Data":"91e27d3427a22499ffb2641108462c099d9bd88cee0d3eaffea4635da7fed38d"} Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.367875 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7037-account-create-update-46qmh" event={"ID":"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b","Type":"ContainerStarted","Data":"5ec23fcbba74e32da7588f9f57492e531174a13341f7a00f4958ac2d24649d5c"} Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.402589 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-lvfzc" podStartSLOduration=2.4025660699999998 podStartE2EDuration="2.40256607s" podCreationTimestamp="2026-01-30 11:12:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:04.399226306 +0000 UTC m=+1074.949102382" watchObservedRunningTime="2026-01-30 11:12:04.40256607 +0000 UTC m=+1074.952442136" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.432597 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-k8rcl"] Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.504873 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8f65-account-create-update-mz5hf"] Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.556918 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-6l5cw"] Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.770483 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-bnwdm"] Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.770692 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" podUID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerName="dnsmasq-dns" containerID="cri-o://9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47" gracePeriod=10 Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.809584 4869 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-knzpq"] Jan 30 11:12:04 crc kubenswrapper[4869]: E0130 11:12:04.814783 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="413090e9-1b8c-43a1-9550-150f0baf022f" containerName="glance-db-sync" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.814814 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="413090e9-1b8c-43a1-9550-150f0baf022f" containerName="glance-db-sync" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.815001 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="413090e9-1b8c-43a1-9550-150f0baf022f" containerName="glance-db-sync" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.815887 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:04 crc kubenswrapper[4869]: I0130 11:12:04.829752 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-knzpq"] Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.000133 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xn6s\" (UniqueName: \"kubernetes.io/projected/532aee83-19a5-40bc-8f60-2cd3e3f662c1-kube-api-access-9xn6s\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.000500 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.000600 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.000648 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.000695 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.000745 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-config\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.104083 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.104155 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.104197 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.104225 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-config\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.104301 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xn6s\" (UniqueName: \"kubernetes.io/projected/532aee83-19a5-40bc-8f60-2cd3e3f662c1-kube-api-access-9xn6s\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.104326 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.105455 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.106189 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.106908 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.107560 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.117572 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-config\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.135361 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xn6s\" (UniqueName: \"kubernetes.io/projected/532aee83-19a5-40bc-8f60-2cd3e3f662c1-kube-api-access-9xn6s\") pod \"dnsmasq-dns-5f59b8f679-knzpq\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") " pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.265127 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.267902 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.379254 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k8rcl" event={"ID":"1b6486d6-add2-4abe-8ccb-35517810f949","Type":"ContainerStarted","Data":"b937c26e8183b10cb5b4f6c82f0582b388d135f43144693626c8b73d6d331e88"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.381264 4869 generic.go:334] "Generic (PLEG): container finished" podID="1293c111-bc92-47ac-aaf0-ae153a289832" containerID="57ecd3103f17790e1e95ee6aac684d14a887854c2c1aa6515e8fe0bef4d0a5ba" exitCode=0 Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.381325 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jqsls" event={"ID":"1293c111-bc92-47ac-aaf0-ae153a289832","Type":"ContainerDied","Data":"57ecd3103f17790e1e95ee6aac684d14a887854c2c1aa6515e8fe0bef4d0a5ba"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.384032 4869 generic.go:334] "Generic (PLEG): container finished" podID="d55e229e-1e13-4cf5-9b86-32122704fa72" containerID="29dfd9cf1cbd1da829ec5f39674e87b03d77f8127500a99c65f6230faca4edba" exitCode=0 Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.384094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lvfzc" event={"ID":"d55e229e-1e13-4cf5-9b86-32122704fa72","Type":"ContainerDied","Data":"29dfd9cf1cbd1da829ec5f39674e87b03d77f8127500a99c65f6230faca4edba"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.385345 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7037-account-create-update-46qmh" event={"ID":"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b","Type":"ContainerStarted","Data":"e03176144d3d2b9313e5fbda7411d00b5f5bb98b3f758dbfeda55c37b4b51417"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.394148 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-70de-account-create-update-qkqf9" event={"ID":"b1c9c7cc-73fd-41bb-8418-126303047e6a","Type":"ContainerStarted","Data":"782d83205eace0b4d457f7add8f76d44c15bd4e233db6a43dd415a838973c7aa"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.409458 4869 generic.go:334] "Generic (PLEG): 
container finished" podID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerID="9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47" exitCode=0 Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.409603 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.410158 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" event={"ID":"83bf5b74-32e0-4431-bea8-94ea95616a23","Type":"ContainerDied","Data":"9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.410202 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-bnwdm" event={"ID":"83bf5b74-32e0-4431-bea8-94ea95616a23","Type":"ContainerDied","Data":"3d2e483315346950bbff42d4c903c9480aa5f5b8002e25f5fa3b84522b01c715"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.410224 4869 scope.go:117] "RemoveContainer" containerID="9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.417611 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-svc\") pod \"83bf5b74-32e0-4431-bea8-94ea95616a23\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.417673 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5vh5\" (UniqueName: \"kubernetes.io/projected/83bf5b74-32e0-4431-bea8-94ea95616a23-kube-api-access-m5vh5\") pod \"83bf5b74-32e0-4431-bea8-94ea95616a23\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.417723 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-sb\") pod \"83bf5b74-32e0-4431-bea8-94ea95616a23\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.417772 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-nb\") pod \"83bf5b74-32e0-4431-bea8-94ea95616a23\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.417874 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-config\") pod \"83bf5b74-32e0-4431-bea8-94ea95616a23\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.417928 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-swift-storage-0\") pod \"83bf5b74-32e0-4431-bea8-94ea95616a23\" (UID: \"83bf5b74-32e0-4431-bea8-94ea95616a23\") " Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.431999 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-70de-account-create-update-qkqf9" podStartSLOduration=2.431974598 podStartE2EDuration="2.431974598s" 
podCreationTimestamp="2026-01-30 11:12:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:05.420313677 +0000 UTC m=+1075.970189763" watchObservedRunningTime="2026-01-30 11:12:05.431974598 +0000 UTC m=+1075.981850664" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.460483 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6l5cw" event={"ID":"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb","Type":"ContainerStarted","Data":"b1c838bbede5bc60bbbb80d7295da7bc641b43ddd0028dc86a903f5a803730ce"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.460521 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6l5cw" event={"ID":"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb","Type":"ContainerStarted","Data":"7a4080031a92d9a1ca3379533a4142321fbe5a651ae27a1bce1bb95d93dbcb36"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.475364 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8f65-account-create-update-mz5hf" event={"ID":"35a2d1cd-798d-4e72-a4fc-b0609e30f714","Type":"ContainerStarted","Data":"7ee90326a4c74e4c96e0ebf6f541fb48ad1a22d16977717912a57be333a828f4"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.475404 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8f65-account-create-update-mz5hf" event={"ID":"35a2d1cd-798d-4e72-a4fc-b0609e30f714","Type":"ContainerStarted","Data":"5bbaae5665a6a6ec9f65eb2073f521ba71ae0c8c942676bb88e773bddf82f251"} Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.478307 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83bf5b74-32e0-4431-bea8-94ea95616a23-kube-api-access-m5vh5" (OuterVolumeSpecName: "kube-api-access-m5vh5") pod "83bf5b74-32e0-4431-bea8-94ea95616a23" (UID: "83bf5b74-32e0-4431-bea8-94ea95616a23"). InnerVolumeSpecName "kube-api-access-m5vh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.492058 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "83bf5b74-32e0-4431-bea8-94ea95616a23" (UID: "83bf5b74-32e0-4431-bea8-94ea95616a23"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.502071 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "83bf5b74-32e0-4431-bea8-94ea95616a23" (UID: "83bf5b74-32e0-4431-bea8-94ea95616a23"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.525578 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.525613 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5vh5\" (UniqueName: \"kubernetes.io/projected/83bf5b74-32e0-4431-bea8-94ea95616a23-kube-api-access-m5vh5\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.525624 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.526608 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-7037-account-create-update-46qmh" podStartSLOduration=2.5265857929999997 podStartE2EDuration="2.526585793s" podCreationTimestamp="2026-01-30 11:12:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:05.467056923 +0000 UTC m=+1076.016932999" watchObservedRunningTime="2026-01-30 11:12:05.526585793 +0000 UTC m=+1076.076461859" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.553588 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "83bf5b74-32e0-4431-bea8-94ea95616a23" (UID: "83bf5b74-32e0-4431-bea8-94ea95616a23"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.559139 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-config" (OuterVolumeSpecName: "config") pod "83bf5b74-32e0-4431-bea8-94ea95616a23" (UID: "83bf5b74-32e0-4431-bea8-94ea95616a23"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.566431 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-8f65-account-create-update-mz5hf" podStartSLOduration=2.566410893 podStartE2EDuration="2.566410893s" podCreationTimestamp="2026-01-30 11:12:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:05.505296609 +0000 UTC m=+1076.055172685" watchObservedRunningTime="2026-01-30 11:12:05.566410893 +0000 UTC m=+1076.116286959" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.589381 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-6l5cw" podStartSLOduration=2.589357615 podStartE2EDuration="2.589357615s" podCreationTimestamp="2026-01-30 11:12:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:05.541728413 +0000 UTC m=+1076.091604499" watchObservedRunningTime="2026-01-30 11:12:05.589357615 +0000 UTC m=+1076.139233681" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.619193 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "83bf5b74-32e0-4431-bea8-94ea95616a23" (UID: "83bf5b74-32e0-4431-bea8-94ea95616a23"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.626676 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.626718 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.626729 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83bf5b74-32e0-4431-bea8-94ea95616a23-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.774573 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-bnwdm"] Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.776560 4869 scope.go:117] "RemoveContainer" containerID="2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.787394 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-bnwdm"] Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.850460 4869 scope.go:117] "RemoveContainer" containerID="9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47" Jan 30 11:12:05 crc kubenswrapper[4869]: E0130 11:12:05.851152 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47\": container with ID starting with 9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47 not found: ID does not exist" containerID="9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47" Jan 
30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.851209 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47"} err="failed to get container status \"9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47\": rpc error: code = NotFound desc = could not find container \"9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47\": container with ID starting with 9d24430108c3d27f60fbe21653ed006688f8781937df23067d161a0a603abe47 not found: ID does not exist" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.851244 4869 scope.go:117] "RemoveContainer" containerID="2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d" Jan 30 11:12:05 crc kubenswrapper[4869]: E0130 11:12:05.851804 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d\": container with ID starting with 2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d not found: ID does not exist" containerID="2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.851834 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d"} err="failed to get container status \"2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d\": rpc error: code = NotFound desc = could not find container \"2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d\": container with ID starting with 2d4a86660aa9a595f2bf205dcc09fbcfa32108e89bbfa1626addc8fe780b108d not found: ID does not exist" Jan 30 11:12:05 crc kubenswrapper[4869]: I0130 11:12:05.877338 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-knzpq"] Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.143120 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83bf5b74-32e0-4431-bea8-94ea95616a23" path="/var/lib/kubelet/pods/83bf5b74-32e0-4431-bea8-94ea95616a23/volumes" Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.484934 4869 generic.go:334] "Generic (PLEG): container finished" podID="0dc6721d-cb72-45ad-ad97-1c045d0bd2cb" containerID="b1c838bbede5bc60bbbb80d7295da7bc641b43ddd0028dc86a903f5a803730ce" exitCode=0 Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.484977 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6l5cw" event={"ID":"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb","Type":"ContainerDied","Data":"b1c838bbede5bc60bbbb80d7295da7bc641b43ddd0028dc86a903f5a803730ce"} Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.488037 4869 generic.go:334] "Generic (PLEG): container finished" podID="35a2d1cd-798d-4e72-a4fc-b0609e30f714" containerID="7ee90326a4c74e4c96e0ebf6f541fb48ad1a22d16977717912a57be333a828f4" exitCode=0 Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.488117 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8f65-account-create-update-mz5hf" event={"ID":"35a2d1cd-798d-4e72-a4fc-b0609e30f714","Type":"ContainerDied","Data":"7ee90326a4c74e4c96e0ebf6f541fb48ad1a22d16977717912a57be333a828f4"} Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.489941 4869 generic.go:334] "Generic (PLEG): container finished" podID="fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b" 
containerID="e03176144d3d2b9313e5fbda7411d00b5f5bb98b3f758dbfeda55c37b4b51417" exitCode=0 Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.489997 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7037-account-create-update-46qmh" event={"ID":"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b","Type":"ContainerDied","Data":"e03176144d3d2b9313e5fbda7411d00b5f5bb98b3f758dbfeda55c37b4b51417"} Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.492293 4869 generic.go:334] "Generic (PLEG): container finished" podID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerID="086101cbe2d42f584fb4117f2e776b752bc72e03503e92d1caa28e3ccf675d17" exitCode=0 Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.492358 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" event={"ID":"532aee83-19a5-40bc-8f60-2cd3e3f662c1","Type":"ContainerDied","Data":"086101cbe2d42f584fb4117f2e776b752bc72e03503e92d1caa28e3ccf675d17"} Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.492377 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" event={"ID":"532aee83-19a5-40bc-8f60-2cd3e3f662c1","Type":"ContainerStarted","Data":"b037c940c6944a5d566d9f1c2b479a8a1379ebdc782eda90054b7dcf58e6c114"} Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.494095 4869 generic.go:334] "Generic (PLEG): container finished" podID="b1c9c7cc-73fd-41bb-8418-126303047e6a" containerID="782d83205eace0b4d457f7add8f76d44c15bd4e233db6a43dd415a838973c7aa" exitCode=0 Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.494118 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-70de-account-create-update-qkqf9" event={"ID":"b1c9c7cc-73fd-41bb-8418-126303047e6a","Type":"ContainerDied","Data":"782d83205eace0b4d457f7add8f76d44c15bd4e233db6a43dd415a838973c7aa"} Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.901649 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:06 crc kubenswrapper[4869]: I0130 11:12:06.959415 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.062704 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1293c111-bc92-47ac-aaf0-ae153a289832-operator-scripts\") pod \"1293c111-bc92-47ac-aaf0-ae153a289832\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.063077 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dndn\" (UniqueName: \"kubernetes.io/projected/1293c111-bc92-47ac-aaf0-ae153a289832-kube-api-access-6dndn\") pod \"1293c111-bc92-47ac-aaf0-ae153a289832\" (UID: \"1293c111-bc92-47ac-aaf0-ae153a289832\") " Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.063178 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tfk4\" (UniqueName: \"kubernetes.io/projected/d55e229e-1e13-4cf5-9b86-32122704fa72-kube-api-access-4tfk4\") pod \"d55e229e-1e13-4cf5-9b86-32122704fa72\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.063239 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1293c111-bc92-47ac-aaf0-ae153a289832-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1293c111-bc92-47ac-aaf0-ae153a289832" (UID: "1293c111-bc92-47ac-aaf0-ae153a289832"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.063265 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d55e229e-1e13-4cf5-9b86-32122704fa72-operator-scripts\") pod \"d55e229e-1e13-4cf5-9b86-32122704fa72\" (UID: \"d55e229e-1e13-4cf5-9b86-32122704fa72\") " Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.063825 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1293c111-bc92-47ac-aaf0-ae153a289832-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.064228 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d55e229e-1e13-4cf5-9b86-32122704fa72-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d55e229e-1e13-4cf5-9b86-32122704fa72" (UID: "d55e229e-1e13-4cf5-9b86-32122704fa72"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.067991 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d55e229e-1e13-4cf5-9b86-32122704fa72-kube-api-access-4tfk4" (OuterVolumeSpecName: "kube-api-access-4tfk4") pod "d55e229e-1e13-4cf5-9b86-32122704fa72" (UID: "d55e229e-1e13-4cf5-9b86-32122704fa72"). InnerVolumeSpecName "kube-api-access-4tfk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.068264 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1293c111-bc92-47ac-aaf0-ae153a289832-kube-api-access-6dndn" (OuterVolumeSpecName: "kube-api-access-6dndn") pod "1293c111-bc92-47ac-aaf0-ae153a289832" (UID: "1293c111-bc92-47ac-aaf0-ae153a289832"). 
InnerVolumeSpecName "kube-api-access-6dndn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.165832 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dndn\" (UniqueName: \"kubernetes.io/projected/1293c111-bc92-47ac-aaf0-ae153a289832-kube-api-access-6dndn\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.165904 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tfk4\" (UniqueName: \"kubernetes.io/projected/d55e229e-1e13-4cf5-9b86-32122704fa72-kube-api-access-4tfk4\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.165922 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d55e229e-1e13-4cf5-9b86-32122704fa72-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.505887 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jqsls" event={"ID":"1293c111-bc92-47ac-aaf0-ae153a289832","Type":"ContainerDied","Data":"ab847d4cb0929adad3dd2666a66e093f420d3ccbce8084d0b8ea46360a8286ed"} Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.505924 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab847d4cb0929adad3dd2666a66e093f420d3ccbce8084d0b8ea46360a8286ed" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.506015 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jqsls" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.508493 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lvfzc" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.508501 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lvfzc" event={"ID":"d55e229e-1e13-4cf5-9b86-32122704fa72","Type":"ContainerDied","Data":"91e27d3427a22499ffb2641108462c099d9bd88cee0d3eaffea4635da7fed38d"} Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.508536 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91e27d3427a22499ffb2641108462c099d9bd88cee0d3eaffea4635da7fed38d" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.517885 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" event={"ID":"532aee83-19a5-40bc-8f60-2cd3e3f662c1","Type":"ContainerStarted","Data":"8693695ab06c3fc00563701174069c7cfa4aa976ac7640c33a5563a481cf2a01"} Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.518393 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:07 crc kubenswrapper[4869]: I0130 11:12:07.546125 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" podStartSLOduration=3.546104571 podStartE2EDuration="3.546104571s" podCreationTimestamp="2026-01-30 11:12:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:07.542635233 +0000 UTC m=+1078.092511299" watchObservedRunningTime="2026-01-30 11:12:07.546104571 +0000 UTC m=+1078.095980637" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.215170 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.224477 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.290128 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.310747 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.326889 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c9c7cc-73fd-41bb-8418-126303047e6a-operator-scripts\") pod \"b1c9c7cc-73fd-41bb-8418-126303047e6a\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.327011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wc4k\" (UniqueName: \"kubernetes.io/projected/b1c9c7cc-73fd-41bb-8418-126303047e6a-kube-api-access-2wc4k\") pod \"b1c9c7cc-73fd-41bb-8418-126303047e6a\" (UID: \"b1c9c7cc-73fd-41bb-8418-126303047e6a\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.327071 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m87cj\" (UniqueName: \"kubernetes.io/projected/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-kube-api-access-m87cj\") pod \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.327102 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-operator-scripts\") pod \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\" (UID: \"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.328659 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b" (UID: "fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.329375 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1c9c7cc-73fd-41bb-8418-126303047e6a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b1c9c7cc-73fd-41bb-8418-126303047e6a" (UID: "b1c9c7cc-73fd-41bb-8418-126303047e6a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.333918 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1c9c7cc-73fd-41bb-8418-126303047e6a-kube-api-access-2wc4k" (OuterVolumeSpecName: "kube-api-access-2wc4k") pod "b1c9c7cc-73fd-41bb-8418-126303047e6a" (UID: "b1c9c7cc-73fd-41bb-8418-126303047e6a"). InnerVolumeSpecName "kube-api-access-2wc4k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.334834 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-kube-api-access-m87cj" (OuterVolumeSpecName: "kube-api-access-m87cj") pod "fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b" (UID: "fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b"). InnerVolumeSpecName "kube-api-access-m87cj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428096 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-operator-scripts\") pod \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428129 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35a2d1cd-798d-4e72-a4fc-b0609e30f714-operator-scripts\") pod \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428167 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd7zh\" (UniqueName: \"kubernetes.io/projected/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-kube-api-access-wd7zh\") pod \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\" (UID: \"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428301 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ckz6\" (UniqueName: \"kubernetes.io/projected/35a2d1cd-798d-4e72-a4fc-b0609e30f714-kube-api-access-6ckz6\") pod \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\" (UID: \"35a2d1cd-798d-4e72-a4fc-b0609e30f714\") " Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428651 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0dc6721d-cb72-45ad-ad97-1c045d0bd2cb" (UID: "0dc6721d-cb72-45ad-ad97-1c045d0bd2cb"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428661 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m87cj\" (UniqueName: \"kubernetes.io/projected/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-kube-api-access-m87cj\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428866 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428877 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c9c7cc-73fd-41bb-8418-126303047e6a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428886 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wc4k\" (UniqueName: \"kubernetes.io/projected/b1c9c7cc-73fd-41bb-8418-126303047e6a-kube-api-access-2wc4k\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.428977 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35a2d1cd-798d-4e72-a4fc-b0609e30f714-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "35a2d1cd-798d-4e72-a4fc-b0609e30f714" (UID: "35a2d1cd-798d-4e72-a4fc-b0609e30f714"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.432291 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35a2d1cd-798d-4e72-a4fc-b0609e30f714-kube-api-access-6ckz6" (OuterVolumeSpecName: "kube-api-access-6ckz6") pod "35a2d1cd-798d-4e72-a4fc-b0609e30f714" (UID: "35a2d1cd-798d-4e72-a4fc-b0609e30f714"). InnerVolumeSpecName "kube-api-access-6ckz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.432632 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-kube-api-access-wd7zh" (OuterVolumeSpecName: "kube-api-access-wd7zh") pod "0dc6721d-cb72-45ad-ad97-1c045d0bd2cb" (UID: "0dc6721d-cb72-45ad-ad97-1c045d0bd2cb"). InnerVolumeSpecName "kube-api-access-wd7zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.530953 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.531005 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35a2d1cd-798d-4e72-a4fc-b0609e30f714-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.531572 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd7zh\" (UniqueName: \"kubernetes.io/projected/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb-kube-api-access-wd7zh\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.531639 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ckz6\" (UniqueName: \"kubernetes.io/projected/35a2d1cd-798d-4e72-a4fc-b0609e30f714-kube-api-access-6ckz6\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.550802 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k8rcl" event={"ID":"1b6486d6-add2-4abe-8ccb-35517810f949","Type":"ContainerStarted","Data":"075eb9430b2d0c542307c4443b0f873f573935612450db8c9b4d96f44753db02"} Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.557500 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7037-account-create-update-46qmh" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.557494 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7037-account-create-update-46qmh" event={"ID":"fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b","Type":"ContainerDied","Data":"5ec23fcbba74e32da7588f9f57492e531174a13341f7a00f4958ac2d24649d5c"} Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.558091 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ec23fcbba74e32da7588f9f57492e531174a13341f7a00f4958ac2d24649d5c" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.560049 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-70de-account-create-update-qkqf9" event={"ID":"b1c9c7cc-73fd-41bb-8418-126303047e6a","Type":"ContainerDied","Data":"91798be69dad5040bf11e8206c0b12c6b550e08d212192987196b9fadbcab41f"} Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.560093 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91798be69dad5040bf11e8206c0b12c6b550e08d212192987196b9fadbcab41f" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.560155 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-70de-account-create-update-qkqf9" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.570171 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6l5cw" event={"ID":"0dc6721d-cb72-45ad-ad97-1c045d0bd2cb","Type":"ContainerDied","Data":"7a4080031a92d9a1ca3379533a4142321fbe5a651ae27a1bce1bb95d93dbcb36"} Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.570533 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a4080031a92d9a1ca3379533a4142321fbe5a651ae27a1bce1bb95d93dbcb36" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.570218 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6l5cw" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.575405 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8f65-account-create-update-mz5hf" event={"ID":"35a2d1cd-798d-4e72-a4fc-b0609e30f714","Type":"ContainerDied","Data":"5bbaae5665a6a6ec9f65eb2073f521ba71ae0c8c942676bb88e773bddf82f251"} Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.575443 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bbaae5665a6a6ec9f65eb2073f521ba71ae0c8c942676bb88e773bddf82f251" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.575502 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8f65-account-create-update-mz5hf" Jan 30 11:12:10 crc kubenswrapper[4869]: I0130 11:12:10.589490 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-k8rcl" podStartSLOduration=1.954807754 podStartE2EDuration="7.589472859s" podCreationTimestamp="2026-01-30 11:12:03 +0000 UTC" firstStartedPulling="2026-01-30 11:12:04.435198587 +0000 UTC m=+1074.985074663" lastFinishedPulling="2026-01-30 11:12:10.069863702 +0000 UTC m=+1080.619739768" observedRunningTime="2026-01-30 11:12:10.567991129 +0000 UTC m=+1081.117867195" watchObservedRunningTime="2026-01-30 11:12:10.589472859 +0000 UTC m=+1081.139348925" Jan 30 11:12:13 crc kubenswrapper[4869]: I0130 11:12:13.600505 4869 generic.go:334] "Generic (PLEG): container finished" podID="1b6486d6-add2-4abe-8ccb-35517810f949" containerID="075eb9430b2d0c542307c4443b0f873f573935612450db8c9b4d96f44753db02" exitCode=0 Jan 30 11:12:13 crc kubenswrapper[4869]: I0130 11:12:13.600558 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k8rcl" event={"ID":"1b6486d6-add2-4abe-8ccb-35517810f949","Type":"ContainerDied","Data":"075eb9430b2d0c542307c4443b0f873f573935612450db8c9b4d96f44753db02"} Jan 30 11:12:14 crc kubenswrapper[4869]: I0130 11:12:14.930309 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.010274 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-config-data\") pod \"1b6486d6-add2-4abe-8ccb-35517810f949\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.010342 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gg5hv\" (UniqueName: \"kubernetes.io/projected/1b6486d6-add2-4abe-8ccb-35517810f949-kube-api-access-gg5hv\") pod \"1b6486d6-add2-4abe-8ccb-35517810f949\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.010467 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-combined-ca-bundle\") pod \"1b6486d6-add2-4abe-8ccb-35517810f949\" (UID: \"1b6486d6-add2-4abe-8ccb-35517810f949\") " Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.015892 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b6486d6-add2-4abe-8ccb-35517810f949-kube-api-access-gg5hv" (OuterVolumeSpecName: "kube-api-access-gg5hv") pod "1b6486d6-add2-4abe-8ccb-35517810f949" (UID: "1b6486d6-add2-4abe-8ccb-35517810f949"). InnerVolumeSpecName "kube-api-access-gg5hv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.038807 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b6486d6-add2-4abe-8ccb-35517810f949" (UID: "1b6486d6-add2-4abe-8ccb-35517810f949"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.055723 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-config-data" (OuterVolumeSpecName: "config-data") pod "1b6486d6-add2-4abe-8ccb-35517810f949" (UID: "1b6486d6-add2-4abe-8ccb-35517810f949"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.111963 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.112001 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b6486d6-add2-4abe-8ccb-35517810f949-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.112017 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gg5hv\" (UniqueName: \"kubernetes.io/projected/1b6486d6-add2-4abe-8ccb-35517810f949-kube-api-access-gg5hv\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.267639 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.313402 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-gddbs"] Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.313740 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" podUID="58fab5d7-620a-47aa-9df0-35e587e79318" containerName="dnsmasq-dns" containerID="cri-o://68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8" gracePeriod=10 Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.617406 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k8rcl" event={"ID":"1b6486d6-add2-4abe-8ccb-35517810f949","Type":"ContainerDied","Data":"b937c26e8183b10cb5b4f6c82f0582b388d135f43144693626c8b73d6d331e88"} Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.617459 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b937c26e8183b10cb5b4f6c82f0582b388d135f43144693626c8b73d6d331e88" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.617425 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-k8rcl" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832303 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-hh5lm"] Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832697 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d55e229e-1e13-4cf5-9b86-32122704fa72" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832741 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d55e229e-1e13-4cf5-9b86-32122704fa72" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832765 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b6486d6-add2-4abe-8ccb-35517810f949" containerName="keystone-db-sync" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832774 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b6486d6-add2-4abe-8ccb-35517810f949" containerName="keystone-db-sync" Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832786 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35a2d1cd-798d-4e72-a4fc-b0609e30f714" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832794 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="35a2d1cd-798d-4e72-a4fc-b0609e30f714" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832810 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerName="dnsmasq-dns" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832818 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerName="dnsmasq-dns" Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832837 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc6721d-cb72-45ad-ad97-1c045d0bd2cb" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832846 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc6721d-cb72-45ad-ad97-1c045d0bd2cb" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832860 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832869 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832885 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1293c111-bc92-47ac-aaf0-ae153a289832" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832892 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1293c111-bc92-47ac-aaf0-ae153a289832" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: E0130 11:12:15.832903 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c9c7cc-73fd-41bb-8418-126303047e6a" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832911 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c9c7cc-73fd-41bb-8418-126303047e6a" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc 
kubenswrapper[4869]: E0130 11:12:15.832931 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerName="init" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.832938 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerName="init" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833126 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="1293c111-bc92-47ac-aaf0-ae153a289832" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833149 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="83bf5b74-32e0-4431-bea8-94ea95616a23" containerName="dnsmasq-dns" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833160 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d55e229e-1e13-4cf5-9b86-32122704fa72" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833173 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="35a2d1cd-798d-4e72-a4fc-b0609e30f714" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833182 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1c9c7cc-73fd-41bb-8418-126303047e6a" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833197 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b6486d6-add2-4abe-8ccb-35517810f949" containerName="keystone-db-sync" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833210 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b" containerName="mariadb-account-create-update" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.833228 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc6721d-cb72-45ad-ad97-1c045d0bd2cb" containerName="mariadb-database-create" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.834314 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.844766 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-hh5lm"] Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.899872 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-8hhhx"] Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.901050 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.907796 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.908062 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.908184 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.908332 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kldpt" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.908470 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 11:12:15 crc kubenswrapper[4869]: I0130 11:12:15.923423 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-8hhhx"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.025728 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.025778 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-config\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.025795 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.025826 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-config-data\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.025960 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.026032 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf4db\" (UniqueName: \"kubernetes.io/projected/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-kube-api-access-wf4db\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.026053 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-scripts\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.026073 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-combined-ca-bundle\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.026103 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.026180 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-credential-keys\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.026282 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktbbq\" (UniqueName: \"kubernetes.io/projected/d399c117-cbff-4044-8b48-54a5c0cc4a2c-kube-api-access-ktbbq\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.026303 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-fernet-keys\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.127694 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf4db\" (UniqueName: \"kubernetes.io/projected/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-kube-api-access-wf4db\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128010 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-scripts\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128035 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-combined-ca-bundle\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128061 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128080 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-credential-keys\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128111 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktbbq\" (UniqueName: \"kubernetes.io/projected/d399c117-cbff-4044-8b48-54a5c0cc4a2c-kube-api-access-ktbbq\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128132 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-fernet-keys\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128207 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128230 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-config\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128249 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128278 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-config-data\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.128306 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.129179 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.130764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-config\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.131355 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.133831 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.144911 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-config-data\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.147681 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-fernet-keys\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.148205 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-scripts\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.149250 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.153277 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-credential-keys\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.160649 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-combined-ca-bundle\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 
11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.170896 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf4db\" (UniqueName: \"kubernetes.io/projected/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-kube-api-access-wf4db\") pod \"dnsmasq-dns-bbf5cc879-hh5lm\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") " pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.174327 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktbbq\" (UniqueName: \"kubernetes.io/projected/d399c117-cbff-4044-8b48-54a5c0cc4a2c-kube-api-access-ktbbq\") pod \"keystone-bootstrap-8hhhx\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.207826 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.225720 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-5ctps"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.226938 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-pwgmc"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.227627 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pwgmc"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.227725 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.227862 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.231251 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dgq8b" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.231669 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.232062 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.232221 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-8hjgx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.232328 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.232435 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.237987 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.245901 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5ctps"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.357914 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03699fa5-87c3-42b4-907b-586fa9d208af-etc-machine-id\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.357994 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-config\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.358075 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwz96\" (UniqueName: \"kubernetes.io/projected/b116442d-9126-417b-a8cf-b36c70966e46-kube-api-access-gwz96\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.358128 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-db-sync-config-data\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.358171 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-combined-ca-bundle\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.358234 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-config-data\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.358304 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxhwv\" (UniqueName: \"kubernetes.io/projected/03699fa5-87c3-42b4-907b-586fa9d208af-kube-api-access-dxhwv\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.360967 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-combined-ca-bundle\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.361070 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-scripts\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.428887 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-hh5lm"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.451105 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-mr25n"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.453250 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.463487 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.463763 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-fnx6m" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464121 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxhwv\" (UniqueName: \"kubernetes.io/projected/03699fa5-87c3-42b4-907b-586fa9d208af-kube-api-access-dxhwv\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464176 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-combined-ca-bundle\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464206 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-scripts\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464256 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-combined-ca-bundle\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464282 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxjkd\" (UniqueName: \"kubernetes.io/projected/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-kube-api-access-lxjkd\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464316 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-db-sync-config-data\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464368 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/03699fa5-87c3-42b4-907b-586fa9d208af-etc-machine-id\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464389 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-config\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464442 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwz96\" (UniqueName: \"kubernetes.io/projected/b116442d-9126-417b-a8cf-b36c70966e46-kube-api-access-gwz96\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464468 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-db-sync-config-data\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464496 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-combined-ca-bundle\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.464524 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-config-data\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.467354 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03699fa5-87c3-42b4-907b-586fa9d208af-etc-machine-id\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.470508 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-combined-ca-bundle\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.476438 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-scripts\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.478752 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-config\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: 
I0130 11:12:16.479072 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-db-sync-config-data\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.481243 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-combined-ca-bundle\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.481608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-config-data\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.492819 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mr25n"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.500193 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwz96\" (UniqueName: \"kubernetes.io/projected/b116442d-9126-417b-a8cf-b36c70966e46-kube-api-access-gwz96\") pod \"neutron-db-sync-pwgmc\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") " pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.506905 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-5jwbz"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.509054 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.509684 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxhwv\" (UniqueName: \"kubernetes.io/projected/03699fa5-87c3-42b4-907b-586fa9d208af-kube-api-access-dxhwv\") pod \"cinder-db-sync-5ctps\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " pod="openstack/cinder-db-sync-5ctps" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.510364 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-5jwbz"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.541413 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-sj9ql"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.543450 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.547060 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.547227 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.552201 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-plprs" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.563893 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sj9ql"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.568092 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.568310 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.568480 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-config\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.568536 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44ca8382-9045-4817-b73f-3c885e446fab-logs\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570339 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-config-data\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570487 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-combined-ca-bundle\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570524 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxjkd\" (UniqueName: \"kubernetes.io/projected/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-kube-api-access-lxjkd\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570696 
4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-db-sync-config-data\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570811 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62g6z\" (UniqueName: \"kubernetes.io/projected/44ca8382-9045-4817-b73f-3c885e446fab-kube-api-access-62g6z\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570832 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-scripts\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570939 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570960 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2879x\" (UniqueName: \"kubernetes.io/projected/6428ca59-598a-417c-a4b0-5f1cabe400cf-kube-api-access-2879x\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570979 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.570996 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-combined-ca-bundle\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.574090 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-combined-ca-bundle\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.575650 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-db-sync-config-data\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.576524 
4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.579135 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.581194 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.581608 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.590841 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxjkd\" (UniqueName: \"kubernetes.io/projected/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-kube-api-access-lxjkd\") pod \"barbican-db-sync-mr25n\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.606054 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.609898 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.641732 4869 generic.go:334] "Generic (PLEG): container finished" podID="58fab5d7-620a-47aa-9df0-35e587e79318" containerID="68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8" exitCode=0 Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.641771 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" event={"ID":"58fab5d7-620a-47aa-9df0-35e587e79318","Type":"ContainerDied","Data":"68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8"} Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.641794 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" event={"ID":"58fab5d7-620a-47aa-9df0-35e587e79318","Type":"ContainerDied","Data":"53d92479d0c328fb0b34da50da76a6c3b7f8459fa1c071e30580e93f0e9e2667"} Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.641811 4869 scope.go:117] "RemoveContainer" containerID="68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.641919 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-gddbs" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.671278 4869 scope.go:117] "RemoveContainer" containerID="732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.671909 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-dns-svc\") pod \"58fab5d7-620a-47aa-9df0-35e587e79318\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672066 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-config\") pod \"58fab5d7-620a-47aa-9df0-35e587e79318\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672158 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-nb\") pod \"58fab5d7-620a-47aa-9df0-35e587e79318\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672198 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-sb\") pod \"58fab5d7-620a-47aa-9df0-35e587e79318\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672271 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-542cp\" (UniqueName: \"kubernetes.io/projected/58fab5d7-620a-47aa-9df0-35e587e79318-kube-api-access-542cp\") pod \"58fab5d7-620a-47aa-9df0-35e587e79318\" (UID: \"58fab5d7-620a-47aa-9df0-35e587e79318\") " Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672549 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672582 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2879x\" (UniqueName: \"kubernetes.io/projected/6428ca59-598a-417c-a4b0-5f1cabe400cf-kube-api-access-2879x\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672609 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672631 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-combined-ca-bundle\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql" 
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672691 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-scripts\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672779 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-config-data\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672807 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672835 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-log-httpd\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672867 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-run-httpd\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672897 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672923 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672944 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54jm7\" (UniqueName: \"kubernetes.io/projected/46a44949-0829-489c-8baf-31966a61641f-kube-api-access-54jm7\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.672969 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-config\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.673001 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44ca8382-9045-4817-b73f-3c885e446fab-logs\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.673032 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-config-data\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.673099 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62g6z\" (UniqueName: \"kubernetes.io/projected/44ca8382-9045-4817-b73f-3c885e446fab-kube-api-access-62g6z\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.673123 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-scripts\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.678032 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.678476 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.679048 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.679564 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-config\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.679866 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44ca8382-9045-4817-b73f-3c885e446fab-logs\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.684670 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58fab5d7-620a-47aa-9df0-35e587e79318-kube-api-access-542cp" (OuterVolumeSpecName: "kube-api-access-542cp") pod "58fab5d7-620a-47aa-9df0-35e587e79318" (UID: "58fab5d7-620a-47aa-9df0-35e587e79318"). InnerVolumeSpecName "kube-api-access-542cp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.689674 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.698183 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-scripts\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.698214 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-config-data\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.701165 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62g6z\" (UniqueName: \"kubernetes.io/projected/44ca8382-9045-4817-b73f-3c885e446fab-kube-api-access-62g6z\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.702702 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-combined-ca-bundle\") pod \"placement-db-sync-sj9ql\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.704809 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2879x\" (UniqueName: \"kubernetes.io/projected/6428ca59-598a-417c-a4b0-5f1cabe400cf-kube-api-access-2879x\") pod \"dnsmasq-dns-56df8fb6b7-5jwbz\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.708582 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pwgmc"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.719941 4869 scope.go:117] "RemoveContainer" containerID="68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8"
Jan 30 11:12:16 crc kubenswrapper[4869]: E0130 11:12:16.728251 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8\": container with ID starting with 68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8 not found: ID does not exist" containerID="68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.728307 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8"} err="failed to get container status \"68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8\": rpc error: code = NotFound desc = could not find container \"68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8\": container with ID starting with 68d49cec397a34f793d27a79e235f23845370377aefe0db471de78c183dae9c8 not found: ID does not exist"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.728336 4869 scope.go:117] "RemoveContainer" containerID="732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8"
Jan 30 11:12:16 crc kubenswrapper[4869]: E0130 11:12:16.729542 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8\": container with ID starting with 732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8 not found: ID does not exist" containerID="732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.729590 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8"} err="failed to get container status \"732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8\": rpc error: code = NotFound desc = could not find container \"732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8\": container with ID starting with 732d9e658eb07f2f4a62814c0b91bd858365ec70a789dec7f657f1b41f5769f8 not found: ID does not exist"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.746094 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5ctps"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.767908 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "58fab5d7-620a-47aa-9df0-35e587e79318" (UID: "58fab5d7-620a-47aa-9df0-35e587e79318"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774638 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774741 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-scripts\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774784 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-config-data\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774836 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-log-httpd\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774869 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-run-httpd\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774901 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54jm7\" (UniqueName: \"kubernetes.io/projected/46a44949-0829-489c-8baf-31966a61641f-kube-api-access-54jm7\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.774997 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-542cp\" (UniqueName: \"kubernetes.io/projected/58fab5d7-620a-47aa-9df0-35e587e79318-kube-api-access-542cp\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.775010 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.775571 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "58fab5d7-620a-47aa-9df0-35e587e79318" (UID: "58fab5d7-620a-47aa-9df0-35e587e79318"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.775947 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-log-httpd\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.776250 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-run-httpd\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.780670 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-config-data\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.781368 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-scripts\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.783956 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.785649 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.791863 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-config" (OuterVolumeSpecName: "config") pod "58fab5d7-620a-47aa-9df0-35e587e79318" (UID: "58fab5d7-620a-47aa-9df0-35e587e79318"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.793104 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54jm7\" (UniqueName: \"kubernetes.io/projected/46a44949-0829-489c-8baf-31966a61641f-kube-api-access-54jm7\") pod \"ceilometer-0\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.800857 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "58fab5d7-620a-47aa-9df0-35e587e79318" (UID: "58fab5d7-620a-47aa-9df0-35e587e79318"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.816189 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mr25n"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.846782 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.876481 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.876504 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.876516 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58fab5d7-620a-47aa-9df0-35e587e79318-config\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.893260 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sj9ql"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.925032 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.958951 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-hh5lm"]
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.998272 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 11:12:16 crc kubenswrapper[4869]: E0130 11:12:16.998772 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58fab5d7-620a-47aa-9df0-35e587e79318" containerName="dnsmasq-dns"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.998787 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="58fab5d7-620a-47aa-9df0-35e587e79318" containerName="dnsmasq-dns"
Jan 30 11:12:16 crc kubenswrapper[4869]: E0130 11:12:16.998823 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58fab5d7-620a-47aa-9df0-35e587e79318" containerName="init"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.998831 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="58fab5d7-620a-47aa-9df0-35e587e79318" containerName="init"
Jan 30 11:12:16 crc kubenswrapper[4869]: I0130 11:12:16.999013 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="58fab5d7-620a-47aa-9df0-35e587e79318" containerName="dnsmasq-dns"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.000114 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.003778 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.004031 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.004422 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jfgbn"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.006561 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.032779 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.069418 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-8hhhx"]
Jan 30 11:12:17 crc kubenswrapper[4869]: W0130 11:12:17.099945 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd399c117_cbff_4044_8b48_54a5c0cc4a2c.slice/crio-2d115bfebee668318a8b75a7a8ef7d395a8813667ffea0928ae0f13e792206cf WatchSource:0}: Error finding container 2d115bfebee668318a8b75a7a8ef7d395a8813667ffea0928ae0f13e792206cf: Status 404 returned error can't find the container with id 2d115bfebee668318a8b75a7a8ef7d395a8813667ffea0928ae0f13e792206cf
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.101830 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.101874 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-scripts\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.101905 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.101929 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.101962 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.101988 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-logs\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.102051 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-config-data\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.102115 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m86fv\" (UniqueName: \"kubernetes.io/projected/c32999d5-8b78-4873-bef7-fbbe2935b2ba-kube-api-access-m86fv\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.175763 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.177581 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.179672 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.181540 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.184881 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.202045 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-gddbs"]
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204024 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-config-data\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204178 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m86fv\" (UniqueName: \"kubernetes.io/projected/c32999d5-8b78-4873-bef7-fbbe2935b2ba-kube-api-access-m86fv\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204354 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204382 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-scripts\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204419 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204461 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204511 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.204550 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-logs\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.205737 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.221133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.260478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-config-data\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.260787 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-logs\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.261475 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.266834 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-gddbs"]
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.272237 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-scripts\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.273355 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.307022 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.307061 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.307466 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.307664 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-logs\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.310315 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.310505 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.310905 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.311082 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h4zk\" (UniqueName: \"kubernetes.io/projected/5b3b0759-f110-4876-8b76-19c5ee562917-kube-api-access-9h4zk\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.318033 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m86fv\" (UniqueName: \"kubernetes.io/projected/c32999d5-8b78-4873-bef7-fbbe2935b2ba-kube-api-access-m86fv\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.319085 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.412456 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.412941 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.412968 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.413032 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-logs\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.413069 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.413102 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.413103 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.413132 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.413163 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h4zk\" (UniqueName: \"kubernetes.io/projected/5b3b0759-f110-4876-8b76-19c5ee562917-kube-api-access-9h4zk\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.414053 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.417136 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-logs\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.418319 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.418672 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.419011 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.419213 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.433727 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h4zk\" (UniqueName: \"kubernetes.io/projected/5b3b0759-f110-4876-8b76-19c5ee562917-kube-api-access-9h4zk\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.442042 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.485110 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.523172 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.716989 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8hhhx" event={"ID":"d399c117-cbff-4044-8b48-54a5c0cc4a2c","Type":"ContainerStarted","Data":"f44e61413ed4598ca9cdf8a2649ba579d0a8e3d211a34b0fc3ad0dc71501c511"}
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.717038 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8hhhx" event={"ID":"d399c117-cbff-4044-8b48-54a5c0cc4a2c","Type":"ContainerStarted","Data":"2d115bfebee668318a8b75a7a8ef7d395a8813667ffea0928ae0f13e792206cf"}
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.735405 4869 generic.go:334] "Generic (PLEG): container finished" podID="4443c8c6-09f0-456c-8d9f-2a08c7861dd7" containerID="56e29d0c6f4b314fa557347fc8a561f12e517b14b5fed9d977cc5f0668fb202b" exitCode=0
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.735442 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" event={"ID":"4443c8c6-09f0-456c-8d9f-2a08c7861dd7","Type":"ContainerDied","Data":"56e29d0c6f4b314fa557347fc8a561f12e517b14b5fed9d977cc5f0668fb202b"}
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.735467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" event={"ID":"4443c8c6-09f0-456c-8d9f-2a08c7861dd7","Type":"ContainerStarted","Data":"a1a5e8422df266c859ead3d0b75ed5d51e9bb2dc8d86df5c5f327cab826f8f98"}
Jan 30 11:12:17 crc kubenswrapper[4869]: I0130 11:12:17.757536 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-8hhhx" podStartSLOduration=2.757514445 podStartE2EDuration="2.757514445s" podCreationTimestamp="2026-01-30 11:12:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:17.741440989 +0000 UTC m=+1088.291317055" watchObservedRunningTime="2026-01-30 11:12:17.757514445 +0000 UTC m=+1088.307390511"
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.088910 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pwgmc"]
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.220303 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58fab5d7-620a-47aa-9df0-35e587e79318" path="/var/lib/kubelet/pods/58fab5d7-620a-47aa-9df0-35e587e79318/volumes"
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.221195 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5ctps"]
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.443376 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sj9ql"]
Jan 30 11:12:18 crc kubenswrapper[4869]: W0130 11:12:18.457103 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44ca8382_9045_4817_b73f_3c885e446fab.slice/crio-d18bb30273bad7356e7fa9119b473e23410567e8bfab0570cb7c38180bd872e1 WatchSource:0}: Error finding container d18bb30273bad7356e7fa9119b473e23410567e8bfab0570cb7c38180bd872e1: Status 404 returned error can't find the container with id d18bb30273bad7356e7fa9119b473e23410567e8bfab0570cb7c38180bd872e1
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.495582 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-5jwbz"]
Jan 30 11:12:18 crc kubenswrapper[4869]: W0130 11:12:18.537472 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ecba8db_afd9_4f76_b5f1_61acfb49bd68.slice/crio-1b87b05e98442937239653203ad84ca6db24618a26106756a989d99a7a90b29c WatchSource:0}: Error finding container 1b87b05e98442937239653203ad84ca6db24618a26106756a989d99a7a90b29c: Status 404 returned error can't find the container with id 1b87b05e98442937239653203ad84ca6db24618a26106756a989d99a7a90b29c
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.539771 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mr25n"]
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.646983 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm"
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.760194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-nb\") pod \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") "
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.760249 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-svc\") pod \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") "
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.760288 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-swift-storage-0\") pod \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") "
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.760369 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf4db\" (UniqueName: \"kubernetes.io/projected/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-kube-api-access-wf4db\") pod \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") "
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.760561 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-sb\") pod \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") "
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.760607 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-config\") pod \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\" (UID: \"4443c8c6-09f0-456c-8d9f-2a08c7861dd7\") "
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.785376 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.788168 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-kube-api-access-wf4db" (OuterVolumeSpecName: "kube-api-access-wf4db") pod "4443c8c6-09f0-456c-8d9f-2a08c7861dd7" (UID: "4443c8c6-09f0-456c-8d9f-2a08c7861dd7"). InnerVolumeSpecName "kube-api-access-wf4db". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.800021 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4443c8c6-09f0-456c-8d9f-2a08c7861dd7" (UID: "4443c8c6-09f0-456c-8d9f-2a08c7861dd7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.857302 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4443c8c6-09f0-456c-8d9f-2a08c7861dd7" (UID: "4443c8c6-09f0-456c-8d9f-2a08c7861dd7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.857621 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4443c8c6-09f0-456c-8d9f-2a08c7861dd7" (UID: "4443c8c6-09f0-456c-8d9f-2a08c7861dd7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.858992 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-config" (OuterVolumeSpecName: "config") pod "4443c8c6-09f0-456c-8d9f-2a08c7861dd7" (UID: "4443c8c6-09f0-456c-8d9f-2a08c7861dd7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.859186 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5ctps" event={"ID":"03699fa5-87c3-42b4-907b-586fa9d208af","Type":"ContainerStarted","Data":"30a1f78919c5f2985046d4aedc542ed19f05c1b985bd0938356d82396a65b898"}
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.870096 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.870148 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-config\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.870160 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.870174 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.870186 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf4db\" (UniqueName: \"kubernetes.io/projected/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-kube-api-access-wf4db\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.876958 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sj9ql" event={"ID":"44ca8382-9045-4817-b73f-3c885e446fab","Type":"ContainerStarted","Data":"d18bb30273bad7356e7fa9119b473e23410567e8bfab0570cb7c38180bd872e1"}
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.884060 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4443c8c6-09f0-456c-8d9f-2a08c7861dd7" (UID: "4443c8c6-09f0-456c-8d9f-2a08c7861dd7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.886489 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" event={"ID":"6428ca59-598a-417c-a4b0-5f1cabe400cf","Type":"ContainerStarted","Data":"ae86b4ab7cea6a94c640f6d2dbee1365ec42fa8c53538f6d66fdc9a287f59f55"}
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.889082 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm" event={"ID":"4443c8c6-09f0-456c-8d9f-2a08c7861dd7","Type":"ContainerDied","Data":"a1a5e8422df266c859ead3d0b75ed5d51e9bb2dc8d86df5c5f327cab826f8f98"}
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.889124 4869 scope.go:117] "RemoveContainer" containerID="56e29d0c6f4b314fa557347fc8a561f12e517b14b5fed9d977cc5f0668fb202b"
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.889225 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-hh5lm"
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.898526 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mr25n" event={"ID":"5ecba8db-afd9-4f76-b5f1-61acfb49bd68","Type":"ContainerStarted","Data":"1b87b05e98442937239653203ad84ca6db24618a26106756a989d99a7a90b29c"}
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.904672 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.909772 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pwgmc" event={"ID":"b116442d-9126-417b-a8cf-b36c70966e46","Type":"ContainerStarted","Data":"ee029ceb18de8c7ea264247d2be5d95f2b3fc59d8b8c4a57e75cfd5dadb30322"}
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.909805 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pwgmc" event={"ID":"b116442d-9126-417b-a8cf-b36c70966e46","Type":"ContainerStarted","Data":"641445cf0b6213b88ab98e12a0a92ef959f5c4ce17f6f1ad534c914727bd9ec4"}
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.952354 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-pwgmc" podStartSLOduration=2.952330937 podStartE2EDuration="2.952330937s" podCreationTimestamp="2026-01-30 11:12:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:18.929475939 +0000 UTC m=+1089.479352005" watchObservedRunningTime="2026-01-30 11:12:18.952330937 +0000 UTC m=+1089.502207003"
Jan 30 11:12:18 crc kubenswrapper[4869]: I0130 11:12:18.985235 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4443c8c6-09f0-456c-8d9f-2a08c7861dd7-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.142607 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.182372 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.233160 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-hh5lm"]
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.270389 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-hh5lm"]
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.280352 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.680720 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.935701 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5b3b0759-f110-4876-8b76-19c5ee562917","Type":"ContainerStarted","Data":"338c8d78f6b48499917c7b371b284c55afd14a47236c821e24de466a6be54396"}
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.941033 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c32999d5-8b78-4873-bef7-fbbe2935b2ba","Type":"ContainerStarted","Data":"8681de9475fe0f49fded771d94e2350ef92d54e75e976294603c58cf9b8f9e5e"}
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.946387 4869 generic.go:334] "Generic (PLEG): container finished" podID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerID="d835c138d83c59a91f1630d5f7b615640b0f815409047e370278c622d8e4c396" exitCode=0
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.946467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" event={"ID":"6428ca59-598a-417c-a4b0-5f1cabe400cf","Type":"ContainerDied","Data":"d835c138d83c59a91f1630d5f7b615640b0f815409047e370278c622d8e4c396"}
Jan 30 11:12:19 crc kubenswrapper[4869]: I0130 11:12:19.957346 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerStarted","Data":"60a185752ceaea7bccb9614da6fe38adbc410d356eb5cf9ab9f868b9e633d7a3"}
Jan 30 11:12:20 crc kubenswrapper[4869]: I0130 11:12:20.164281 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4443c8c6-09f0-456c-8d9f-2a08c7861dd7" path="/var/lib/kubelet/pods/4443c8c6-09f0-456c-8d9f-2a08c7861dd7/volumes"
Jan 30 11:12:20 crc kubenswrapper[4869]: I0130 11:12:20.975193 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5b3b0759-f110-4876-8b76-19c5ee562917","Type":"ContainerStarted","Data":"4118833298eb6293a9d8d4a8933820e1b04242a4c53a576c6cf994267635a5ab"}
Jan 30 11:12:20 crc kubenswrapper[4869]: I0130 11:12:20.978655 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c32999d5-8b78-4873-bef7-fbbe2935b2ba","Type":"ContainerStarted","Data":"b5455052d9ca36801f429fbf0dcb72e53940e653dcb12ebbd0180d76a87884cf"}
Jan 30 11:12:20 crc kubenswrapper[4869]: I0130 11:12:20.982817 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" event={"ID":"6428ca59-598a-417c-a4b0-5f1cabe400cf","Type":"ContainerStarted","Data":"d20a5b414c144535904b5d82437c17681cd88daa6480c3e93f4b6c8692713739"}
Jan 30 11:12:20 crc kubenswrapper[4869]: I0130 11:12:20.982996 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz"
Jan 30 11:12:21 crc kubenswrapper[4869]: I0130 11:12:21.012317 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" podStartSLOduration=5.012298795 podStartE2EDuration="5.012298795s" podCreationTimestamp="2026-01-30 11:12:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:21.010676789 +0000 UTC m=+1091.560552865" watchObservedRunningTime="2026-01-30 11:12:21.012298795 +0000 UTC m=+1091.562174861"
Jan 30 11:12:21 crc kubenswrapper[4869]: I0130 11:12:21.996575 4869 generic.go:334] "Generic (PLEG): container finished" podID="d399c117-cbff-4044-8b48-54a5c0cc4a2c" containerID="f44e61413ed4598ca9cdf8a2649ba579d0a8e3d211a34b0fc3ad0dc71501c511" exitCode=0
Jan 30 11:12:21 crc kubenswrapper[4869]: I0130 11:12:21.996655 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8hhhx" event={"ID":"d399c117-cbff-4044-8b48-54a5c0cc4a2c","Type":"ContainerDied","Data":"f44e61413ed4598ca9cdf8a2649ba579d0a8e3d211a34b0fc3ad0dc71501c511"}
Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.000169 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5b3b0759-f110-4876-8b76-19c5ee562917","Type":"ContainerStarted","Data":"9cdf7fa0935ee83a447f2e288383139eb437c7087e260988063aff01a8b10c94"}
Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.000274 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-log" containerID="cri-o://4118833298eb6293a9d8d4a8933820e1b04242a4c53a576c6cf994267635a5ab" gracePeriod=30
Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.000315 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-httpd" containerID="cri-o://9cdf7fa0935ee83a447f2e288383139eb437c7087e260988063aff01a8b10c94" gracePeriod=30
Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.003515 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerName="glance-log" containerID="cri-o://b5455052d9ca36801f429fbf0dcb72e53940e653dcb12ebbd0180d76a87884cf" gracePeriod=30
Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.003574 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerName="glance-httpd" containerID="cri-o://8f4cd69838b8a22c789b8e9879b8a2757639954e1c0e5866c3fe7bcc330f414f" gracePeriod=30
Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.003491 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c32999d5-8b78-4873-bef7-fbbe2935b2ba","Type":"ContainerStarted","Data":"8f4cd69838b8a22c789b8e9879b8a2757639954e1c0e5866c3fe7bcc330f414f"}
Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.077275 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.07725472 podStartE2EDuration="6.07725472s" podCreationTimestamp="2026-01-30 11:12:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:22.058415795 +0000 UTC m=+1092.608291881"
watchObservedRunningTime="2026-01-30 11:12:22.07725472 +0000 UTC m=+1092.627130786" Jan 30 11:12:22 crc kubenswrapper[4869]: I0130 11:12:22.112979 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.112956293 podStartE2EDuration="7.112956293s" podCreationTimestamp="2026-01-30 11:12:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:22.105680737 +0000 UTC m=+1092.655556803" watchObservedRunningTime="2026-01-30 11:12:22.112956293 +0000 UTC m=+1092.662832359" Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.018567 4869 generic.go:334] "Generic (PLEG): container finished" podID="5b3b0759-f110-4876-8b76-19c5ee562917" containerID="9cdf7fa0935ee83a447f2e288383139eb437c7087e260988063aff01a8b10c94" exitCode=0 Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.018600 4869 generic.go:334] "Generic (PLEG): container finished" podID="5b3b0759-f110-4876-8b76-19c5ee562917" containerID="4118833298eb6293a9d8d4a8933820e1b04242a4c53a576c6cf994267635a5ab" exitCode=143 Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.018641 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5b3b0759-f110-4876-8b76-19c5ee562917","Type":"ContainerDied","Data":"9cdf7fa0935ee83a447f2e288383139eb437c7087e260988063aff01a8b10c94"} Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.018691 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5b3b0759-f110-4876-8b76-19c5ee562917","Type":"ContainerDied","Data":"4118833298eb6293a9d8d4a8933820e1b04242a4c53a576c6cf994267635a5ab"} Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.021373 4869 generic.go:334] "Generic (PLEG): container finished" podID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerID="8f4cd69838b8a22c789b8e9879b8a2757639954e1c0e5866c3fe7bcc330f414f" exitCode=143 Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.021398 4869 generic.go:334] "Generic (PLEG): container finished" podID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerID="b5455052d9ca36801f429fbf0dcb72e53940e653dcb12ebbd0180d76a87884cf" exitCode=143 Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.021563 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c32999d5-8b78-4873-bef7-fbbe2935b2ba","Type":"ContainerDied","Data":"8f4cd69838b8a22c789b8e9879b8a2757639954e1c0e5866c3fe7bcc330f414f"} Jan 30 11:12:23 crc kubenswrapper[4869]: I0130 11:12:23.021596 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c32999d5-8b78-4873-bef7-fbbe2935b2ba","Type":"ContainerDied","Data":"b5455052d9ca36801f429fbf0dcb72e53940e653dcb12ebbd0180d76a87884cf"} Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.548744 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.577943 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-credential-keys\") pod \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.578008 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-combined-ca-bundle\") pod \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.578298 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-fernet-keys\") pod \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.579219 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktbbq\" (UniqueName: \"kubernetes.io/projected/d399c117-cbff-4044-8b48-54a5c0cc4a2c-kube-api-access-ktbbq\") pod \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.579372 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-scripts\") pod \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.579447 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-config-data\") pod \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\" (UID: \"d399c117-cbff-4044-8b48-54a5c0cc4a2c\") " Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.585892 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d399c117-cbff-4044-8b48-54a5c0cc4a2c-kube-api-access-ktbbq" (OuterVolumeSpecName: "kube-api-access-ktbbq") pod "d399c117-cbff-4044-8b48-54a5c0cc4a2c" (UID: "d399c117-cbff-4044-8b48-54a5c0cc4a2c"). InnerVolumeSpecName "kube-api-access-ktbbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.604095 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-scripts" (OuterVolumeSpecName: "scripts") pod "d399c117-cbff-4044-8b48-54a5c0cc4a2c" (UID: "d399c117-cbff-4044-8b48-54a5c0cc4a2c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.604410 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d399c117-cbff-4044-8b48-54a5c0cc4a2c" (UID: "d399c117-cbff-4044-8b48-54a5c0cc4a2c"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.605055 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d399c117-cbff-4044-8b48-54a5c0cc4a2c" (UID: "d399c117-cbff-4044-8b48-54a5c0cc4a2c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.613071 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d399c117-cbff-4044-8b48-54a5c0cc4a2c" (UID: "d399c117-cbff-4044-8b48-54a5c0cc4a2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.636783 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-config-data" (OuterVolumeSpecName: "config-data") pod "d399c117-cbff-4044-8b48-54a5c0cc4a2c" (UID: "d399c117-cbff-4044-8b48-54a5c0cc4a2c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.680764 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.680801 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.680814 4869 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.680826 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.680836 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d399c117-cbff-4044-8b48-54a5c0cc4a2c-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.680846 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktbbq\" (UniqueName: \"kubernetes.io/projected/d399c117-cbff-4044-8b48-54a5c0cc4a2c-kube-api-access-ktbbq\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.849032 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.910475 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-knzpq"] Jan 30 11:12:26 crc kubenswrapper[4869]: I0130 11:12:26.911070 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" 
containerName="dnsmasq-dns" containerID="cri-o://8693695ab06c3fc00563701174069c7cfa4aa976ac7640c33a5563a481cf2a01" gracePeriod=10 Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.100194 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8hhhx" event={"ID":"d399c117-cbff-4044-8b48-54a5c0cc4a2c","Type":"ContainerDied","Data":"2d115bfebee668318a8b75a7a8ef7d395a8813667ffea0928ae0f13e792206cf"} Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.100241 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d115bfebee668318a8b75a7a8ef7d395a8813667ffea0928ae0f13e792206cf" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.100309 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-8hhhx" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.705317 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-8hhhx"] Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.715520 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-8hhhx"] Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.830776 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-97bpn"] Jan 30 11:12:27 crc kubenswrapper[4869]: E0130 11:12:27.831224 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4443c8c6-09f0-456c-8d9f-2a08c7861dd7" containerName="init" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.831250 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4443c8c6-09f0-456c-8d9f-2a08c7861dd7" containerName="init" Jan 30 11:12:27 crc kubenswrapper[4869]: E0130 11:12:27.831292 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d399c117-cbff-4044-8b48-54a5c0cc4a2c" containerName="keystone-bootstrap" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.831302 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d399c117-cbff-4044-8b48-54a5c0cc4a2c" containerName="keystone-bootstrap" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.831520 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4443c8c6-09f0-456c-8d9f-2a08c7861dd7" containerName="init" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.831547 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d399c117-cbff-4044-8b48-54a5c0cc4a2c" containerName="keystone-bootstrap" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.832324 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.839413 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.839447 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.839461 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.841311 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.841376 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kldpt" Jan 30 11:12:27 crc kubenswrapper[4869]: I0130 11:12:27.864803 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-97bpn"] Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.014637 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-credential-keys\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.014686 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-fernet-keys\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.014743 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4klx\" (UniqueName: \"kubernetes.io/projected/f9113947-7343-454e-a806-50db72e74a54-kube-api-access-s4klx\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.014768 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-combined-ca-bundle\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.014793 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-scripts\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.014815 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-config-data\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.111258 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerID="8693695ab06c3fc00563701174069c7cfa4aa976ac7640c33a5563a481cf2a01" exitCode=0 Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.111303 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" event={"ID":"532aee83-19a5-40bc-8f60-2cd3e3f662c1","Type":"ContainerDied","Data":"8693695ab06c3fc00563701174069c7cfa4aa976ac7640c33a5563a481cf2a01"} Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.116688 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-credential-keys\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.116761 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-fernet-keys\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.116796 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4klx\" (UniqueName: \"kubernetes.io/projected/f9113947-7343-454e-a806-50db72e74a54-kube-api-access-s4klx\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.116821 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-combined-ca-bundle\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.116847 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-scripts\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.116876 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-config-data\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.145537 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-credential-keys\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.145626 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-fernet-keys\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.145821 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-combined-ca-bundle\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.145930 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-scripts\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.147333 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-config-data\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.152225 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4klx\" (UniqueName: \"kubernetes.io/projected/f9113947-7343-454e-a806-50db72e74a54-kube-api-access-s4klx\") pod \"keystone-bootstrap-97bpn\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.154621 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:28 crc kubenswrapper[4869]: I0130 11:12:28.171467 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d399c117-cbff-4044-8b48-54a5c0cc4a2c" path="/var/lib/kubelet/pods/d399c117-cbff-4044-8b48-54a5c0cc4a2c/volumes" Jan 30 11:12:30 crc kubenswrapper[4869]: I0130 11:12:30.266614 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.137:5353: connect: connection refused" Jan 30 11:12:35 crc kubenswrapper[4869]: I0130 11:12:35.266163 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.137:5353: connect: connection refused" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.136659 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.142446 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.205361 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c32999d5-8b78-4873-bef7-fbbe2935b2ba","Type":"ContainerDied","Data":"8681de9475fe0f49fded771d94e2350ef92d54e75e976294603c58cf9b8f9e5e"} Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.205390 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.205412 4869 scope.go:117] "RemoveContainer" containerID="8f4cd69838b8a22c789b8e9879b8a2757639954e1c0e5866c3fe7bcc330f414f" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.210256 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5b3b0759-f110-4876-8b76-19c5ee562917","Type":"ContainerDied","Data":"338c8d78f6b48499917c7b371b284c55afd14a47236c821e24de466a6be54396"} Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.210303 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298312 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-public-tls-certs\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298384 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-config-data\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298434 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-logs\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298473 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-logs\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298514 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-httpd-run\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298540 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m86fv\" (UniqueName: \"kubernetes.io/projected/c32999d5-8b78-4873-bef7-fbbe2935b2ba-kube-api-access-m86fv\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298581 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-config-data\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298628 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 
11:12:36.298688 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-combined-ca-bundle\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298745 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-internal-tls-certs\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298799 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298837 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-scripts\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298868 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-scripts\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298897 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-httpd-run\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.298929 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-combined-ca-bundle\") pod \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\" (UID: \"c32999d5-8b78-4873-bef7-fbbe2935b2ba\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.299006 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h4zk\" (UniqueName: \"kubernetes.io/projected/5b3b0759-f110-4876-8b76-19c5ee562917-kube-api-access-9h4zk\") pod \"5b3b0759-f110-4876-8b76-19c5ee562917\" (UID: \"5b3b0759-f110-4876-8b76-19c5ee562917\") " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.299293 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.299524 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.299532 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-logs" (OuterVolumeSpecName: "logs") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.299852 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-logs" (OuterVolumeSpecName: "logs") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.303086 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.305866 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.306251 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.306341 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b3b0759-f110-4876-8b76-19c5ee562917-kube-api-access-9h4zk" (OuterVolumeSpecName: "kube-api-access-9h4zk") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "kube-api-access-9h4zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.307844 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-scripts" (OuterVolumeSpecName: "scripts") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.314136 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-scripts" (OuterVolumeSpecName: "scripts") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.316286 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c32999d5-8b78-4873-bef7-fbbe2935b2ba-kube-api-access-m86fv" (OuterVolumeSpecName: "kube-api-access-m86fv") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "kube-api-access-m86fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.355892 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.358525 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.379370 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-config-data" (OuterVolumeSpecName: "config-data") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.391094 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-config-data" (OuterVolumeSpecName: "config-data") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.392931 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c32999d5-8b78-4873-bef7-fbbe2935b2ba" (UID: "c32999d5-8b78-4873-bef7-fbbe2935b2ba"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.393428 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5b3b0759-f110-4876-8b76-19c5ee562917" (UID: "5b3b0759-f110-4876-8b76-19c5ee562917"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.400900 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.400933 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.400942 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b3b0759-f110-4876-8b76-19c5ee562917-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.400954 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m86fv\" (UniqueName: \"kubernetes.io/projected/c32999d5-8b78-4873-bef7-fbbe2935b2ba-kube-api-access-m86fv\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.400965 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401073 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401101 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401114 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401143 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401156 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b3b0759-f110-4876-8b76-19c5ee562917-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401193 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401206 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c32999d5-8b78-4873-bef7-fbbe2935b2ba-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401218 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401232 4869 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-9h4zk\" (UniqueName: \"kubernetes.io/projected/5b3b0759-f110-4876-8b76-19c5ee562917-kube-api-access-9h4zk\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.401246 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c32999d5-8b78-4873-bef7-fbbe2935b2ba-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.421046 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.421834 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.503332 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.503854 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.582300 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.629844 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.651014 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.667103 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.674329 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: E0130 11:12:36.675086 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-httpd" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675116 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-httpd" Jan 30 11:12:36 crc kubenswrapper[4869]: E0130 11:12:36.675133 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-log" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675142 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-log" Jan 30 11:12:36 crc kubenswrapper[4869]: E0130 11:12:36.675177 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerName="glance-httpd" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675184 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerName="glance-httpd" Jan 30 11:12:36 crc kubenswrapper[4869]: E0130 11:12:36.675208 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" 
containerName="glance-log" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675216 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerName="glance-log" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675431 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-log" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675457 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerName="glance-httpd" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675470 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" containerName="glance-httpd" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.675481 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" containerName="glance-log" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.676597 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.679623 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.684396 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.684703 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.686195 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jfgbn" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.686649 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.702863 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.704836 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.708333 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.707871 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717117 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-config-data\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717174 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24q9g\" (UniqueName: \"kubernetes.io/projected/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-kube-api-access-24q9g\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717218 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-logs\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717252 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717288 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717373 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717540 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.717619 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-combined-ca-bundle\") pod \"glance-default-external-api-0\" 
(UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.721571 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.819358 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.819433 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.819470 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.819652 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.819648 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.819826 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4nlj\" (UniqueName: \"kubernetes.io/projected/b223a1eb-7739-43f7-ab0a-50504f2a902d-kube-api-access-d4nlj\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.819876 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820118 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-logs\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820161 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820283 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820420 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820482 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820557 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820699 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-config-data\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820757 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24q9g\" (UniqueName: \"kubernetes.io/projected/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-kube-api-access-24q9g\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820829 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.820889 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-logs\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.821283 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.821430 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-logs\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.824570 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.825459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.826797 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.827881 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-config-data\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.838559 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24q9g\" (UniqueName: \"kubernetes.io/projected/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-kube-api-access-24q9g\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.857297 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " pod="openstack/glance-default-external-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922159 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922227 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-httpd-run\") pod 
\"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922281 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922320 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922362 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922578 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4nlj\" (UniqueName: \"kubernetes.io/projected/b223a1eb-7739-43f7-ab0a-50504f2a902d-kube-api-access-d4nlj\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922635 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-logs\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922659 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.922822 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.923306 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.923673 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-logs\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " 
pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.926538 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.927052 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.928253 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.929378 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.942244 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4nlj\" (UniqueName: \"kubernetes.io/projected/b223a1eb-7739-43f7-ab0a-50504f2a902d-kube-api-access-d4nlj\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.952838 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:12:36 crc kubenswrapper[4869]: I0130 11:12:36.964880 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq"
Jan 30 11:12:36 crc kubenswrapper[4869]: E0130 11:12:36.965190 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified"
Jan 30 11:12:36 crc kubenswrapper[4869]: E0130 11:12:36.965662 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lxjkd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-mr25n_openstack(5ecba8db-afd9-4f76-b5f1-61acfb49bd68): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 30 11:12:36 crc kubenswrapper[4869]: E0130 11:12:36.967990 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-mr25n" podUID="5ecba8db-afd9-4f76-b5f1-61acfb49bd68"
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.002148 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
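[editor's note] The three E-level entries above trace one failed image pull end to end: the CRI PullImage call for the barbican image is canceled mid-copy, kuberuntime_manager.go dumps the full container spec with ErrImagePull, and pod_workers.go skips the pod sync. The pod_workers entry beginning at 11:12:37.232865 below shows the follow-up state, ImagePullBackOff, meaning the kubelet retries the pull on an exponential back-off rather than in a tight loop. A minimal Go sketch of such a retry schedule, assuming the commonly cited kubelet defaults of a 10s initial delay doubling up to a 5-minute cap (these values are not printed anywhere in this log):

    package main

    import (
        "fmt"
        "time"
    )

    // backoff returns the wait before pull attempt n (1-based),
    // doubling from initial and saturating at max.
    func backoff(n int, initial, max time.Duration) time.Duration {
        d := initial
        for i := 1; i < n; i++ {
            d *= 2
            if d >= max {
                return max
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 7; n++ {
            fmt.Printf("pull attempt %d: wait %v\n", n, backoff(n, 10*time.Second, 5*time.Minute))
        }
    }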
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.030803 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.126047 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-nb\") pod \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") "
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.126092 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-swift-storage-0\") pod \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") "
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.126148 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-config\") pod \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") "
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.126181 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xn6s\" (UniqueName: \"kubernetes.io/projected/532aee83-19a5-40bc-8f60-2cd3e3f662c1-kube-api-access-9xn6s\") pod \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") "
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.126200 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-sb\") pod \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") "
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.126272 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-svc\") pod \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\" (UID: \"532aee83-19a5-40bc-8f60-2cd3e3f662c1\") "
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.130882 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/532aee83-19a5-40bc-8f60-2cd3e3f662c1-kube-api-access-9xn6s" (OuterVolumeSpecName: "kube-api-access-9xn6s") pod "532aee83-19a5-40bc-8f60-2cd3e3f662c1" (UID: "532aee83-19a5-40bc-8f60-2cd3e3f662c1"). InnerVolumeSpecName "kube-api-access-9xn6s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.177833 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-config" (OuterVolumeSpecName: "config") pod "532aee83-19a5-40bc-8f60-2cd3e3f662c1" (UID: "532aee83-19a5-40bc-8f60-2cd3e3f662c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
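[editor's note] The entries above are the unmount half of the volume reconciler's state machine: reconciler_common.go:159 starts an UnmountVolume operation for each of the six volumes of the deleted dnsmasq-dns pod, operation_generator.go:803 reports TearDown per volume, and the reconciler_common.go:293 entries just below mark each volume detached; the orphaned volumes directory is finally removed at 11:12:38.145434. A minimal Go filter that recovers this lifecycle for one pod UID from a log like this one (the file name is an assumption):

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "strings"
    )

    func main() {
        const uid = "532aee83-19a5-40bc-8f60-2cd3e3f662c1" // the dnsmasq-dns pod above
        f, err := os.Open("kubelet.log")                   // assumed path
        if err != nil {
            panic(err)
        }
        defer f.Close()
        phases := []string{
            "UnmountVolume started",
            "UnmountVolume.TearDown succeeded",
            "Volume detached",
            "Cleaned up orphaned pod volumes dir",
        }
        sc := bufio.NewScanner(f)
        sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // single entries can exceed the 64 KiB default
        for sc.Scan() {
            line := sc.Text()
            if !strings.Contains(line, uid) {
                continue
            }
            for _, p := range phases {
                if strings.Contains(line, p) {
                    fmt.Println(p)
                }
            }
        }
    }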
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.185349 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "532aee83-19a5-40bc-8f60-2cd3e3f662c1" (UID: "532aee83-19a5-40bc-8f60-2cd3e3f662c1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.188335 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "532aee83-19a5-40bc-8f60-2cd3e3f662c1" (UID: "532aee83-19a5-40bc-8f60-2cd3e3f662c1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.197797 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "532aee83-19a5-40bc-8f60-2cd3e3f662c1" (UID: "532aee83-19a5-40bc-8f60-2cd3e3f662c1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.227573 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.228580 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-knzpq" event={"ID":"532aee83-19a5-40bc-8f60-2cd3e3f662c1","Type":"ContainerDied","Data":"b037c940c6944a5d566d9f1c2b479a8a1379ebdc782eda90054b7dcf58e6c114"} Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.229369 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.229629 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.229641 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.229652 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xn6s\" (UniqueName: \"kubernetes.io/projected/532aee83-19a5-40bc-8f60-2cd3e3f662c1-kube-api-access-9xn6s\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.229661 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.229670 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/532aee83-19a5-40bc-8f60-2cd3e3f662c1-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:37 crc kubenswrapper[4869]: E0130 11:12:37.232865 4869 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-mr25n" podUID="5ecba8db-afd9-4f76-b5f1-61acfb49bd68" Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.286563 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-knzpq"] Jan 30 11:12:37 crc kubenswrapper[4869]: I0130 11:12:37.299884 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-knzpq"] Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.145434 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" path="/var/lib/kubelet/pods/532aee83-19a5-40bc-8f60-2cd3e3f662c1/volumes" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.146131 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b3b0759-f110-4876-8b76-19c5ee562917" path="/var/lib/kubelet/pods/5b3b0759-f110-4876-8b76-19c5ee562917/volumes" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.146845 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c32999d5-8b78-4873-bef7-fbbe2935b2ba" path="/var/lib/kubelet/pods/c32999d5-8b78-4873-bef7-fbbe2935b2ba/volumes" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.235810 4869 generic.go:334] "Generic (PLEG): container finished" podID="b116442d-9126-417b-a8cf-b36c70966e46" containerID="ee029ceb18de8c7ea264247d2be5d95f2b3fc59d8b8c4a57e75cfd5dadb30322" exitCode=0 Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.235850 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pwgmc" event={"ID":"b116442d-9126-417b-a8cf-b36c70966e46","Type":"ContainerDied","Data":"ee029ceb18de8c7ea264247d2be5d95f2b3fc59d8b8c4a57e75cfd5dadb30322"} Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.265482 4869 scope.go:117] "RemoveContainer" containerID="b5455052d9ca36801f429fbf0dcb72e53940e653dcb12ebbd0180d76a87884cf" Jan 30 11:12:38 crc kubenswrapper[4869]: E0130 11:12:38.293123 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Jan 30 11:12:38 crc kubenswrapper[4869]: E0130 11:12:38.293284 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dxhwv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-5ctps_openstack(03699fa5-87c3-42b4-907b-586fa9d208af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 11:12:38 crc kubenswrapper[4869]: E0130 11:12:38.294582 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-5ctps" podUID="03699fa5-87c3-42b4-907b-586fa9d208af" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.466928 4869 scope.go:117] "RemoveContainer" containerID="9cdf7fa0935ee83a447f2e288383139eb437c7087e260988063aff01a8b10c94" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.515065 4869 scope.go:117] "RemoveContainer" containerID="4118833298eb6293a9d8d4a8933820e1b04242a4c53a576c6cf994267635a5ab" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.535844 4869 scope.go:117] "RemoveContainer" containerID="8693695ab06c3fc00563701174069c7cfa4aa976ac7640c33a5563a481cf2a01" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.559941 4869 scope.go:117] "RemoveContainer" containerID="086101cbe2d42f584fb4117f2e776b752bc72e03503e92d1caa28e3ccf675d17" Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.833884 4869 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.894831 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-97bpn"] Jan 30 11:12:38 crc kubenswrapper[4869]: I0130 11:12:38.983497 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:12:38 crc kubenswrapper[4869]: W0130 11:12:38.991494 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6501acc1_6bb0_4ed6_8485_f2ca36b7a4f4.slice/crio-76aceb35947985fdbc0a402e4cdf131fdddda547ece33bd0aa8bc7a6dac2934e WatchSource:0}: Error finding container 76aceb35947985fdbc0a402e4cdf131fdddda547ece33bd0aa8bc7a6dac2934e: Status 404 returned error can't find the container with id 76aceb35947985fdbc0a402e4cdf131fdddda547ece33bd0aa8bc7a6dac2934e Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.248038 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4","Type":"ContainerStarted","Data":"76aceb35947985fdbc0a402e4cdf131fdddda547ece33bd0aa8bc7a6dac2934e"} Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.249934 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97bpn" event={"ID":"f9113947-7343-454e-a806-50db72e74a54","Type":"ContainerStarted","Data":"066fc7f3790053de7cae6caa9f1d67b9a3ad3696f73c95279d935a734f52c6d1"} Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.249981 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97bpn" event={"ID":"f9113947-7343-454e-a806-50db72e74a54","Type":"ContainerStarted","Data":"7134f9bb2c79ad662193acb6ba336ecd54df3629036419af5ec7926287d24f5e"} Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.251947 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sj9ql" event={"ID":"44ca8382-9045-4817-b73f-3c885e446fab","Type":"ContainerStarted","Data":"d6f7e90459e93a0c1cb0bd18e82b5a0a8e2bdb2ce7d8119dcb5ef5ced7f57ff7"} Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.254867 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerStarted","Data":"653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb"} Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.262339 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b223a1eb-7739-43f7-ab0a-50504f2a902d","Type":"ContainerStarted","Data":"136f9b5fe0ed1ddec63c41b3f600195c16be05df896b8a64f102abe5c323e2f5"} Jan 30 11:12:39 crc kubenswrapper[4869]: E0130 11:12:39.266781 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-5ctps" podUID="03699fa5-87c3-42b4-907b-586fa9d208af" Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.277736 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-97bpn" podStartSLOduration=12.277698519 podStartE2EDuration="12.277698519s" podCreationTimestamp="2026-01-30 11:12:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.277736 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-97bpn" podStartSLOduration=12.277698519 podStartE2EDuration="12.277698519s" podCreationTimestamp="2026-01-30 11:12:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:39.272584343 +0000 UTC m=+1109.822460409" watchObservedRunningTime="2026-01-30 11:12:39.277698519 +0000 UTC m=+1109.827574585"
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.308907 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-sj9ql" podStartSLOduration=3.5017685910000003 podStartE2EDuration="23.308883894s" podCreationTimestamp="2026-01-30 11:12:16 +0000 UTC" firstStartedPulling="2026-01-30 11:12:18.494891914 +0000 UTC m=+1089.044767980" lastFinishedPulling="2026-01-30 11:12:38.302007217 +0000 UTC m=+1108.851883283" observedRunningTime="2026-01-30 11:12:39.289517324 +0000 UTC m=+1109.839393410" watchObservedRunningTime="2026-01-30 11:12:39.308883894 +0000 UTC m=+1109.858759960"
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.736424 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pwgmc"
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.833899 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-combined-ca-bundle\") pod \"b116442d-9126-417b-a8cf-b36c70966e46\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") "
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.833996 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-config\") pod \"b116442d-9126-417b-a8cf-b36c70966e46\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") "
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.834167 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwz96\" (UniqueName: \"kubernetes.io/projected/b116442d-9126-417b-a8cf-b36c70966e46-kube-api-access-gwz96\") pod \"b116442d-9126-417b-a8cf-b36c70966e46\" (UID: \"b116442d-9126-417b-a8cf-b36c70966e46\") "
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.838818 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b116442d-9126-417b-a8cf-b36c70966e46-kube-api-access-gwz96" (OuterVolumeSpecName: "kube-api-access-gwz96") pod "b116442d-9126-417b-a8cf-b36c70966e46" (UID: "b116442d-9126-417b-a8cf-b36c70966e46"). InnerVolumeSpecName "kube-api-access-gwz96". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.861505 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-config" (OuterVolumeSpecName: "config") pod "b116442d-9126-417b-a8cf-b36c70966e46" (UID: "b116442d-9126-417b-a8cf-b36c70966e46"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.868043 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b116442d-9126-417b-a8cf-b36c70966e46" (UID: "b116442d-9126-417b-a8cf-b36c70966e46"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.935935 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwz96\" (UniqueName: \"kubernetes.io/projected/b116442d-9126-417b-a8cf-b36c70966e46-kube-api-access-gwz96\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.936344 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:39 crc kubenswrapper[4869]: I0130 11:12:39.936359 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b116442d-9126-417b-a8cf-b36c70966e46-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.304970 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pwgmc" event={"ID":"b116442d-9126-417b-a8cf-b36c70966e46","Type":"ContainerDied","Data":"641445cf0b6213b88ab98e12a0a92ef959f5c4ce17f6f1ad534c914727bd9ec4"} Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.305038 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="641445cf0b6213b88ab98e12a0a92ef959f5c4ce17f6f1ad534c914727bd9ec4" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.305099 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pwgmc" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.318294 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4","Type":"ContainerStarted","Data":"ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277"} Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.318506 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4","Type":"ContainerStarted","Data":"f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e"} Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.327395 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b223a1eb-7739-43f7-ab0a-50504f2a902d","Type":"ContainerStarted","Data":"54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf"} Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.327452 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b223a1eb-7739-43f7-ab0a-50504f2a902d","Type":"ContainerStarted","Data":"b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1"} Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.355599 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.355578772 podStartE2EDuration="4.355578772s" podCreationTimestamp="2026-01-30 11:12:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:40.346665099 +0000 UTC m=+1110.896541185" watchObservedRunningTime="2026-01-30 11:12:40.355578772 +0000 UTC m=+1110.905454838" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.370381 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.370365531 podStartE2EDuration="4.370365531s" podCreationTimestamp="2026-01-30 11:12:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:40.367990204 +0000 UTC m=+1110.917866290" watchObservedRunningTime="2026-01-30 11:12:40.370365531 +0000 UTC m=+1110.920241597" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.589504 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-2ngc6"] Jan 30 11:12:40 crc kubenswrapper[4869]: E0130 11:12:40.597892 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerName="init" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.597913 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerName="init" Jan 30 11:12:40 crc kubenswrapper[4869]: E0130 11:12:40.597928 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b116442d-9126-417b-a8cf-b36c70966e46" containerName="neutron-db-sync" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.597934 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b116442d-9126-417b-a8cf-b36c70966e46" containerName="neutron-db-sync" Jan 30 11:12:40 crc kubenswrapper[4869]: E0130 11:12:40.597966 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerName="dnsmasq-dns" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.597973 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerName="dnsmasq-dns" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.598204 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b116442d-9126-417b-a8cf-b36c70966e46" containerName="neutron-db-sync" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.598216 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="532aee83-19a5-40bc-8f60-2cd3e3f662c1" containerName="dnsmasq-dns" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.599541 4869 util.go:30] "No sandbox for pod can be found. 
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.599541 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6"
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.621190 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-2ngc6"]
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.659225 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6"
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.659350 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-config\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6"
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.659387 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6"
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.659427 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fhwl\" (UniqueName: \"kubernetes.io/projected/d9ea9c68-a482-490f-97cd-35545cea0e42-kube-api-access-7fhwl\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6"
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.659536 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6"
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.659586 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-svc\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6"
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.696097 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6c54c84574-hxb8h"]
Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.697600 4869 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.701463 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.701645 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.701771 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.702067 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dgq8b" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.707140 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c54c84574-hxb8h"] Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.771849 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-config\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.771991 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.772235 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-svc\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.772382 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.772513 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66kc4\" (UniqueName: \"kubernetes.io/projected/a2820db2-7c1c-46d7-9baf-8bf031649668-kube-api-access-66kc4\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.772604 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-config\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.772659 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: 
\"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.773017 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fhwl\" (UniqueName: \"kubernetes.io/projected/d9ea9c68-a482-490f-97cd-35545cea0e42-kube-api-access-7fhwl\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.773355 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-httpd-config\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.773365 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.774077 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-combined-ca-bundle\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.774460 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-svc\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.774896 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-ovndb-tls-certs\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.775263 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.775824 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.777649 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-config\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " 
pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.800136 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fhwl\" (UniqueName: \"kubernetes.io/projected/d9ea9c68-a482-490f-97cd-35545cea0e42-kube-api-access-7fhwl\") pod \"dnsmasq-dns-6b7b667979-2ngc6\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.877629 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-httpd-config\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.877691 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-combined-ca-bundle\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.877736 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-ovndb-tls-certs\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.877785 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-config\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.877869 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66kc4\" (UniqueName: \"kubernetes.io/projected/a2820db2-7c1c-46d7-9baf-8bf031649668-kube-api-access-66kc4\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.886616 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-combined-ca-bundle\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.886648 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-ovndb-tls-certs\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.887331 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-httpd-config\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.888580 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-config\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.897214 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66kc4\" (UniqueName: \"kubernetes.io/projected/a2820db2-7c1c-46d7-9baf-8bf031649668-kube-api-access-66kc4\") pod \"neutron-6c54c84574-hxb8h\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:40 crc kubenswrapper[4869]: I0130 11:12:40.966140 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:41 crc kubenswrapper[4869]: I0130 11:12:41.046635 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:41 crc kubenswrapper[4869]: I0130 11:12:41.345770 4869 generic.go:334] "Generic (PLEG): container finished" podID="44ca8382-9045-4817-b73f-3c885e446fab" containerID="d6f7e90459e93a0c1cb0bd18e82b5a0a8e2bdb2ce7d8119dcb5ef5ced7f57ff7" exitCode=0 Jan 30 11:12:41 crc kubenswrapper[4869]: I0130 11:12:41.345891 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sj9ql" event={"ID":"44ca8382-9045-4817-b73f-3c885e446fab","Type":"ContainerDied","Data":"d6f7e90459e93a0c1cb0bd18e82b5a0a8e2bdb2ce7d8119dcb5ef5ced7f57ff7"} Jan 30 11:12:41 crc kubenswrapper[4869]: I0130 11:12:41.358567 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerStarted","Data":"ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9"} Jan 30 11:12:41 crc kubenswrapper[4869]: I0130 11:12:41.423397 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-2ngc6"] Jan 30 11:12:41 crc kubenswrapper[4869]: W0130 11:12:41.759308 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2820db2_7c1c_46d7_9baf_8bf031649668.slice/crio-1d70180f54cd9cd737060df78c0fb3687e66b3d7d89cfd76caff5c9d291cf815 WatchSource:0}: Error finding container 1d70180f54cd9cd737060df78c0fb3687e66b3d7d89cfd76caff5c9d291cf815: Status 404 returned error can't find the container with id 1d70180f54cd9cd737060df78c0fb3687e66b3d7d89cfd76caff5c9d291cf815 Jan 30 11:12:41 crc kubenswrapper[4869]: I0130 11:12:41.760559 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c54c84574-hxb8h"] Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.383546 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c54c84574-hxb8h" event={"ID":"a2820db2-7c1c-46d7-9baf-8bf031649668","Type":"ContainerStarted","Data":"9de643e0adb20ada99ce0f5134b6c4728242a55ce93d1ec0e43ce5db03b38852"} Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.383589 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c54c84574-hxb8h" event={"ID":"a2820db2-7c1c-46d7-9baf-8bf031649668","Type":"ContainerStarted","Data":"f9e32cc6f22599826f2c7ba35fc0e547c20981a4190c6a91c483999252cd7e29"} Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.383599 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c54c84574-hxb8h" 
event={"ID":"a2820db2-7c1c-46d7-9baf-8bf031649668","Type":"ContainerStarted","Data":"1d70180f54cd9cd737060df78c0fb3687e66b3d7d89cfd76caff5c9d291cf815"} Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.383739 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.386943 4869 generic.go:334] "Generic (PLEG): container finished" podID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerID="e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c" exitCode=0 Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.387565 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" event={"ID":"d9ea9c68-a482-490f-97cd-35545cea0e42","Type":"ContainerDied","Data":"e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c"} Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.387586 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" event={"ID":"d9ea9c68-a482-490f-97cd-35545cea0e42","Type":"ContainerStarted","Data":"23240a5cbe7f86942d12b9db64820b949059b4cc21d50cc7fe0e2678eb186e32"} Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.426019 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6c54c84574-hxb8h" podStartSLOduration=2.425996976 podStartE2EDuration="2.425996976s" podCreationTimestamp="2026-01-30 11:12:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:42.421966041 +0000 UTC m=+1112.971842107" watchObservedRunningTime="2026-01-30 11:12:42.425996976 +0000 UTC m=+1112.975873042" Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.844594 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.932978 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62g6z\" (UniqueName: \"kubernetes.io/projected/44ca8382-9045-4817-b73f-3c885e446fab-kube-api-access-62g6z\") pod \"44ca8382-9045-4817-b73f-3c885e446fab\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.933112 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-scripts\") pod \"44ca8382-9045-4817-b73f-3c885e446fab\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.933189 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-config-data\") pod \"44ca8382-9045-4817-b73f-3c885e446fab\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.933285 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-combined-ca-bundle\") pod \"44ca8382-9045-4817-b73f-3c885e446fab\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.933307 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44ca8382-9045-4817-b73f-3c885e446fab-logs\") pod \"44ca8382-9045-4817-b73f-3c885e446fab\" (UID: \"44ca8382-9045-4817-b73f-3c885e446fab\") " Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.935828 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44ca8382-9045-4817-b73f-3c885e446fab-logs" (OuterVolumeSpecName: "logs") pod "44ca8382-9045-4817-b73f-3c885e446fab" (UID: "44ca8382-9045-4817-b73f-3c885e446fab"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.939435 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44ca8382-9045-4817-b73f-3c885e446fab-kube-api-access-62g6z" (OuterVolumeSpecName: "kube-api-access-62g6z") pod "44ca8382-9045-4817-b73f-3c885e446fab" (UID: "44ca8382-9045-4817-b73f-3c885e446fab"). InnerVolumeSpecName "kube-api-access-62g6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.939437 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-scripts" (OuterVolumeSpecName: "scripts") pod "44ca8382-9045-4817-b73f-3c885e446fab" (UID: "44ca8382-9045-4817-b73f-3c885e446fab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.959277 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44ca8382-9045-4817-b73f-3c885e446fab" (UID: "44ca8382-9045-4817-b73f-3c885e446fab"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:42 crc kubenswrapper[4869]: I0130 11:12:42.965241 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-config-data" (OuterVolumeSpecName: "config-data") pod "44ca8382-9045-4817-b73f-3c885e446fab" (UID: "44ca8382-9045-4817-b73f-3c885e446fab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.035325 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.035592 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44ca8382-9045-4817-b73f-3c885e446fab-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.035604 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62g6z\" (UniqueName: \"kubernetes.io/projected/44ca8382-9045-4817-b73f-3c885e446fab-kube-api-access-62g6z\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.035615 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.035626 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ca8382-9045-4817-b73f-3c885e446fab-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.414691 4869 generic.go:334] "Generic (PLEG): container finished" podID="f9113947-7343-454e-a806-50db72e74a54" containerID="066fc7f3790053de7cae6caa9f1d67b9a3ad3696f73c95279d935a734f52c6d1" exitCode=0 Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.414771 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97bpn" event={"ID":"f9113947-7343-454e-a806-50db72e74a54","Type":"ContainerDied","Data":"066fc7f3790053de7cae6caa9f1d67b9a3ad3696f73c95279d935a734f52c6d1"} Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.420533 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sj9ql" event={"ID":"44ca8382-9045-4817-b73f-3c885e446fab","Type":"ContainerDied","Data":"d18bb30273bad7356e7fa9119b473e23410567e8bfab0570cb7c38180bd872e1"} Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.420560 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d18bb30273bad7356e7fa9119b473e23410567e8bfab0570cb7c38180bd872e1" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.420573 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sj9ql" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.428327 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" event={"ID":"d9ea9c68-a482-490f-97cd-35545cea0e42","Type":"ContainerStarted","Data":"8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b"} Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.428611 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.460116 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" podStartSLOduration=3.460086325 podStartE2EDuration="3.460086325s" podCreationTimestamp="2026-01-30 11:12:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:43.454102285 +0000 UTC m=+1114.003978361" watchObservedRunningTime="2026-01-30 11:12:43.460086325 +0000 UTC m=+1114.009962391" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.540040 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7d5fb468b8-g8bf7"] Jan 30 11:12:43 crc kubenswrapper[4869]: E0130 11:12:43.540649 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44ca8382-9045-4817-b73f-3c885e446fab" containerName="placement-db-sync" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.540667 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="44ca8382-9045-4817-b73f-3c885e446fab" containerName="placement-db-sync" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.540981 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="44ca8382-9045-4817-b73f-3c885e446fab" containerName="placement-db-sync" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.542134 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.589659 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d5fb468b8-g8bf7"] Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.593884 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.602248 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.602683 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.603279 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.609958 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-plprs" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.662405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-config-data\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.662450 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwqsf\" (UniqueName: \"kubernetes.io/projected/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-kube-api-access-wwqsf\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.662501 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-public-tls-certs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.662582 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.662614 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-logs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.662641 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-internal-tls-certs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 
11:12:43.662666 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-scripts\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.764566 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.768421 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-logs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.768504 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-internal-tls-certs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.768549 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-scripts\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.768627 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-config-data\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.768646 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwqsf\" (UniqueName: \"kubernetes.io/projected/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-kube-api-access-wwqsf\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.768747 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-public-tls-certs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.780513 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-logs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.790833 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-scripts\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.796508 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.811981 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-public-tls-certs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.812452 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-internal-tls-certs\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.812734 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-config-data\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.813282 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwqsf\" (UniqueName: \"kubernetes.io/projected/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-kube-api-access-wwqsf\") pod \"placement-7d5fb468b8-g8bf7\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.905554 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.966231 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6b94756495-f44c6"] Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.968092 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.986356 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-combined-ca-bundle\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.986536 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-internal-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.986568 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-httpd-config\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.986597 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-config\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.986635 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98sdq\" (UniqueName: \"kubernetes.io/projected/36d9c803-e141-42ec-a169-66838e70db68-kube-api-access-98sdq\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.986675 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-public-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.986863 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-ovndb-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.987857 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b94756495-f44c6"] Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.997949 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 30 11:12:43 crc kubenswrapper[4869]: I0130 11:12:43.998196 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.088385 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-internal-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.088432 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-httpd-config\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.088452 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-config\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.088480 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98sdq\" (UniqueName: \"kubernetes.io/projected/36d9c803-e141-42ec-a169-66838e70db68-kube-api-access-98sdq\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.088507 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-public-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.088554 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-ovndb-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.088602 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-combined-ca-bundle\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.095129 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-config\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.096394 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-ovndb-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.098353 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-httpd-config\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " 
pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.103345 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-combined-ca-bundle\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.105659 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-internal-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.108269 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98sdq\" (UniqueName: \"kubernetes.io/projected/36d9c803-e141-42ec-a169-66838e70db68-kube-api-access-98sdq\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.108831 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-public-tls-certs\") pod \"neutron-6b94756495-f44c6\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") " pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:44 crc kubenswrapper[4869]: I0130 11:12:44.330397 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.002486 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.003006 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.031682 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.031736 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.036033 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.059913 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.083058 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.091243 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.470914 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.470962 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-internal-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.470972 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 11:12:47 crc kubenswrapper[4869]: I0130 11:12:47.470996 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 11:12:48 crc kubenswrapper[4869]: I0130 11:12:48.987305 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.089276 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-credential-keys\") pod \"f9113947-7343-454e-a806-50db72e74a54\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.089346 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-combined-ca-bundle\") pod \"f9113947-7343-454e-a806-50db72e74a54\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.089494 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-config-data\") pod \"f9113947-7343-454e-a806-50db72e74a54\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.089618 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4klx\" (UniqueName: \"kubernetes.io/projected/f9113947-7343-454e-a806-50db72e74a54-kube-api-access-s4klx\") pod \"f9113947-7343-454e-a806-50db72e74a54\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.089754 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-fernet-keys\") pod \"f9113947-7343-454e-a806-50db72e74a54\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.089785 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-scripts\") pod \"f9113947-7343-454e-a806-50db72e74a54\" (UID: \"f9113947-7343-454e-a806-50db72e74a54\") " Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.103104 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9113947-7343-454e-a806-50db72e74a54-kube-api-access-s4klx" (OuterVolumeSpecName: "kube-api-access-s4klx") pod "f9113947-7343-454e-a806-50db72e74a54" (UID: "f9113947-7343-454e-a806-50db72e74a54"). InnerVolumeSpecName "kube-api-access-s4klx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.103967 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "f9113947-7343-454e-a806-50db72e74a54" (UID: "f9113947-7343-454e-a806-50db72e74a54"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.106906 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-scripts" (OuterVolumeSpecName: "scripts") pod "f9113947-7343-454e-a806-50db72e74a54" (UID: "f9113947-7343-454e-a806-50db72e74a54"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.109951 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f9113947-7343-454e-a806-50db72e74a54" (UID: "f9113947-7343-454e-a806-50db72e74a54"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.137923 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-config-data" (OuterVolumeSpecName: "config-data") pod "f9113947-7343-454e-a806-50db72e74a54" (UID: "f9113947-7343-454e-a806-50db72e74a54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.147933 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9113947-7343-454e-a806-50db72e74a54" (UID: "f9113947-7343-454e-a806-50db72e74a54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.191857 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.191895 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.191907 4869 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.191917 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.191926 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9113947-7343-454e-a806-50db72e74a54-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.191936 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4klx\" (UniqueName: \"kubernetes.io/projected/f9113947-7343-454e-a806-50db72e74a54-kube-api-access-s4klx\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.295423 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d5fb468b8-g8bf7"] Jan 30 11:12:49 crc 
kubenswrapper[4869]: W0130 11:12:49.303618 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca11f9fd_8bd6_4e6c_b99b_187fd5b00d20.slice/crio-ea8c3373f3fcb84389eb5ec5fc4720bc39a770678d5df3852493c1ebb0d68bb1 WatchSource:0}: Error finding container ea8c3373f3fcb84389eb5ec5fc4720bc39a770678d5df3852493c1ebb0d68bb1: Status 404 returned error can't find the container with id ea8c3373f3fcb84389eb5ec5fc4720bc39a770678d5df3852493c1ebb0d68bb1 Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.493646 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b94756495-f44c6"] Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.496868 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97bpn" event={"ID":"f9113947-7343-454e-a806-50db72e74a54","Type":"ContainerDied","Data":"7134f9bb2c79ad662193acb6ba336ecd54df3629036419af5ec7926287d24f5e"} Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.496928 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7134f9bb2c79ad662193acb6ba336ecd54df3629036419af5ec7926287d24f5e" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.496991 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-97bpn" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.506733 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerStarted","Data":"9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e"} Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.513815 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d5fb468b8-g8bf7" event={"ID":"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20","Type":"ContainerStarted","Data":"ea8c3373f3fcb84389eb5ec5fc4720bc39a770678d5df3852493c1ebb0d68bb1"} Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.787775 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.788008 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 11:12:49 crc kubenswrapper[4869]: I0130 11:12:49.817850 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.034536 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.037495 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.081111 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.118174 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6879fcbdc7-xgzr6"] Jan 30 11:12:50 crc kubenswrapper[4869]: E0130 11:12:50.119126 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9113947-7343-454e-a806-50db72e74a54" containerName="keystone-bootstrap" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.119293 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9113947-7343-454e-a806-50db72e74a54" 
containerName="keystone-bootstrap" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.119735 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9113947-7343-454e-a806-50db72e74a54" containerName="keystone-bootstrap" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.120777 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.124609 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.124946 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.125121 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.125666 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.132685 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.132934 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kldpt" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.176789 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6879fcbdc7-xgzr6"] Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220619 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-public-tls-certs\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220683 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-combined-ca-bundle\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220767 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-fernet-keys\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220816 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-scripts\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220837 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-config-data\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " 
pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220873 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t99ck\" (UniqueName: \"kubernetes.io/projected/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-kube-api-access-t99ck\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220893 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-credential-keys\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.220950 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-internal-tls-certs\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323267 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-internal-tls-certs\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323379 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-public-tls-certs\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323410 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-combined-ca-bundle\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323479 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-fernet-keys\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323519 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-scripts\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323547 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-config-data\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 
11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323597 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t99ck\" (UniqueName: \"kubernetes.io/projected/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-kube-api-access-t99ck\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.323621 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-credential-keys\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.331920 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-internal-tls-certs\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.333440 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-scripts\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.334648 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-credential-keys\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.334804 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-public-tls-certs\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.338401 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-combined-ca-bundle\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.344106 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t99ck\" (UniqueName: \"kubernetes.io/projected/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-kube-api-access-t99ck\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.345906 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-config-data\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.354213 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" 
(UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-fernet-keys\") pod \"keystone-6879fcbdc7-xgzr6\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.453131 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.530045 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d5fb468b8-g8bf7" event={"ID":"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20","Type":"ContainerStarted","Data":"404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac"} Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.530107 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d5fb468b8-g8bf7" event={"ID":"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20","Type":"ContainerStarted","Data":"8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b"} Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.530265 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.530288 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.535222 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b94756495-f44c6" event={"ID":"36d9c803-e141-42ec-a169-66838e70db68","Type":"ContainerStarted","Data":"9c8a94bbfc90e47a8d2c00ce2fcbb49a27c39f7b15d444037af8085e4789dd3a"} Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.535279 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b94756495-f44c6" event={"ID":"36d9c803-e141-42ec-a169-66838e70db68","Type":"ContainerStarted","Data":"b5b80d71d4c52800890ddf23dc8a54d41d125816b930b86402cf8924ae35084c"} Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.535294 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b94756495-f44c6" event={"ID":"36d9c803-e141-42ec-a169-66838e70db68","Type":"ContainerStarted","Data":"ff6a318f34b4f7709b167bb6e09140f856f6f70a0b1f1ea0f6fc8d5e94c32877"} Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.562699 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7d5fb468b8-g8bf7" podStartSLOduration=7.562672503 podStartE2EDuration="7.562672503s" podCreationTimestamp="2026-01-30 11:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:50.553209354 +0000 UTC m=+1121.103085430" watchObservedRunningTime="2026-01-30 11:12:50.562672503 +0000 UTC m=+1121.112548589" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.599864 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6b94756495-f44c6" podStartSLOduration=7.599833578 podStartE2EDuration="7.599833578s" podCreationTimestamp="2026-01-30 11:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:50.588461755 +0000 UTC m=+1121.138337841" watchObservedRunningTime="2026-01-30 11:12:50.599833578 +0000 UTC m=+1121.149709654" Jan 30 11:12:50 crc kubenswrapper[4869]: I0130 11:12:50.972965 4869 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:50.999738 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6879fcbdc7-xgzr6"] Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:51.080313 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-5jwbz"] Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:51.081016 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerName="dnsmasq-dns" containerID="cri-o://d20a5b414c144535904b5d82437c17681cd88daa6480c3e93f4b6c8692713739" gracePeriod=10 Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:51.559759 4869 generic.go:334] "Generic (PLEG): container finished" podID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerID="d20a5b414c144535904b5d82437c17681cd88daa6480c3e93f4b6c8692713739" exitCode=0 Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:51.560034 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" event={"ID":"6428ca59-598a-417c-a4b0-5f1cabe400cf","Type":"ContainerDied","Data":"d20a5b414c144535904b5d82437c17681cd88daa6480c3e93f4b6c8692713739"} Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:51.567681 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6879fcbdc7-xgzr6" event={"ID":"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1","Type":"ContainerStarted","Data":"14547de67c89fe61548feaf147a6440ef77b34ea2530d07ac2a91f821ffc1bc1"} Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:51.568008 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6b94756495-f44c6" Jan 30 11:12:51 crc kubenswrapper[4869]: I0130 11:12:51.877024 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.039437 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2879x\" (UniqueName: \"kubernetes.io/projected/6428ca59-598a-417c-a4b0-5f1cabe400cf-kube-api-access-2879x\") pod \"6428ca59-598a-417c-a4b0-5f1cabe400cf\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.039877 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-sb\") pod \"6428ca59-598a-417c-a4b0-5f1cabe400cf\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.039971 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-svc\") pod \"6428ca59-598a-417c-a4b0-5f1cabe400cf\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.040076 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-swift-storage-0\") pod \"6428ca59-598a-417c-a4b0-5f1cabe400cf\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.040099 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-nb\") pod \"6428ca59-598a-417c-a4b0-5f1cabe400cf\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.040156 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-config\") pod \"6428ca59-598a-417c-a4b0-5f1cabe400cf\" (UID: \"6428ca59-598a-417c-a4b0-5f1cabe400cf\") " Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.054210 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6428ca59-598a-417c-a4b0-5f1cabe400cf-kube-api-access-2879x" (OuterVolumeSpecName: "kube-api-access-2879x") pod "6428ca59-598a-417c-a4b0-5f1cabe400cf" (UID: "6428ca59-598a-417c-a4b0-5f1cabe400cf"). InnerVolumeSpecName "kube-api-access-2879x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.111785 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6428ca59-598a-417c-a4b0-5f1cabe400cf" (UID: "6428ca59-598a-417c-a4b0-5f1cabe400cf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.138261 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6428ca59-598a-417c-a4b0-5f1cabe400cf" (UID: "6428ca59-598a-417c-a4b0-5f1cabe400cf"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.149071 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.151549 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.151633 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2879x\" (UniqueName: \"kubernetes.io/projected/6428ca59-598a-417c-a4b0-5f1cabe400cf-kube-api-access-2879x\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.161589 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-config" (OuterVolumeSpecName: "config") pod "6428ca59-598a-417c-a4b0-5f1cabe400cf" (UID: "6428ca59-598a-417c-a4b0-5f1cabe400cf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.162162 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6428ca59-598a-417c-a4b0-5f1cabe400cf" (UID: "6428ca59-598a-417c-a4b0-5f1cabe400cf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.178998 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6428ca59-598a-417c-a4b0-5f1cabe400cf" (UID: "6428ca59-598a-417c-a4b0-5f1cabe400cf"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.254788 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.254843 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.254863 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6428ca59-598a-417c-a4b0-5f1cabe400cf-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.619543 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6879fcbdc7-xgzr6" event={"ID":"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1","Type":"ContainerStarted","Data":"8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69"} Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.620888 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.634317 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" event={"ID":"6428ca59-598a-417c-a4b0-5f1cabe400cf","Type":"ContainerDied","Data":"ae86b4ab7cea6a94c640f6d2dbee1365ec42fa8c53538f6d66fdc9a287f59f55"} Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.634384 4869 scope.go:117] "RemoveContainer" containerID="d20a5b414c144535904b5d82437c17681cd88daa6480c3e93f4b6c8692713739" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.634543 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.664576 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mr25n" event={"ID":"5ecba8db-afd9-4f76-b5f1-61acfb49bd68","Type":"ContainerStarted","Data":"10cf6b361a191a48180d5a15a376bed618b2ddcb19f98a9c3a51139c9385393c"} Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.681439 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5ctps" event={"ID":"03699fa5-87c3-42b4-907b-586fa9d208af","Type":"ContainerStarted","Data":"685feecd6eaaece16ff8456bc67ddc2bf170e620ad33736fe1c4a7ee80e38f46"} Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.682627 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6879fcbdc7-xgzr6" podStartSLOduration=2.682606812 podStartE2EDuration="2.682606812s" podCreationTimestamp="2026-01-30 11:12:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:12:52.676985923 +0000 UTC m=+1123.226861999" watchObservedRunningTime="2026-01-30 11:12:52.682606812 +0000 UTC m=+1123.232482868" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.730628 4869 scope.go:117] "RemoveContainer" containerID="d835c138d83c59a91f1630d5f7b615640b0f815409047e370278c622d8e4c396" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.739795 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-mr25n" podStartSLOduration=3.5096609560000003 podStartE2EDuration="36.739762555s" podCreationTimestamp="2026-01-30 11:12:16 +0000 UTC" firstStartedPulling="2026-01-30 11:12:18.555892775 +0000 UTC m=+1089.105768841" lastFinishedPulling="2026-01-30 11:12:51.785994384 +0000 UTC m=+1122.335870440" observedRunningTime="2026-01-30 11:12:52.72728097 +0000 UTC m=+1123.277157036" watchObservedRunningTime="2026-01-30 11:12:52.739762555 +0000 UTC m=+1123.289638621" Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.772567 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-5jwbz"] Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.800771 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-5jwbz"] Jan 30 11:12:52 crc kubenswrapper[4869]: I0130 11:12:52.801050 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-5ctps" podStartSLOduration=4.265807667 podStartE2EDuration="36.801013993s" podCreationTimestamp="2026-01-30 11:12:16 +0000 UTC" firstStartedPulling="2026-01-30 11:12:18.208208597 +0000 UTC m=+1088.758084663" lastFinishedPulling="2026-01-30 11:12:50.743414923 +0000 UTC m=+1121.293290989" observedRunningTime="2026-01-30 11:12:52.791066081 +0000 UTC m=+1123.340942147" watchObservedRunningTime="2026-01-30 11:12:52.801013993 +0000 UTC m=+1123.350890059" Jan 30 11:12:54 crc kubenswrapper[4869]: I0130 11:12:54.159773 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" path="/var/lib/kubelet/pods/6428ca59-598a-417c-a4b0-5f1cabe400cf/volumes" Jan 30 11:12:55 crc kubenswrapper[4869]: I0130 11:12:55.712889 4869 generic.go:334] "Generic (PLEG): container finished" podID="5ecba8db-afd9-4f76-b5f1-61acfb49bd68" containerID="10cf6b361a191a48180d5a15a376bed618b2ddcb19f98a9c3a51139c9385393c" exitCode=0 Jan 30 11:12:55 crc kubenswrapper[4869]: 
I0130 11:12:55.712932 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mr25n" event={"ID":"5ecba8db-afd9-4f76-b5f1-61acfb49bd68","Type":"ContainerDied","Data":"10cf6b361a191a48180d5a15a376bed618b2ddcb19f98a9c3a51139c9385393c"} Jan 30 11:12:56 crc kubenswrapper[4869]: I0130 11:12:56.848103 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56df8fb6b7-5jwbz" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.143:5353: i/o timeout" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.386998 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.587447 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-combined-ca-bundle\") pod \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.587631 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxjkd\" (UniqueName: \"kubernetes.io/projected/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-kube-api-access-lxjkd\") pod \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.587682 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-db-sync-config-data\") pod \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\" (UID: \"5ecba8db-afd9-4f76-b5f1-61acfb49bd68\") " Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.595111 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-kube-api-access-lxjkd" (OuterVolumeSpecName: "kube-api-access-lxjkd") pod "5ecba8db-afd9-4f76-b5f1-61acfb49bd68" (UID: "5ecba8db-afd9-4f76-b5f1-61acfb49bd68"). InnerVolumeSpecName "kube-api-access-lxjkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.595456 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5ecba8db-afd9-4f76-b5f1-61acfb49bd68" (UID: "5ecba8db-afd9-4f76-b5f1-61acfb49bd68"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.616016 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ecba8db-afd9-4f76-b5f1-61acfb49bd68" (UID: "5ecba8db-afd9-4f76-b5f1-61acfb49bd68"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.692616 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxjkd\" (UniqueName: \"kubernetes.io/projected/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-kube-api-access-lxjkd\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.692656 4869 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.692669 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecba8db-afd9-4f76-b5f1-61acfb49bd68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.745921 4869 generic.go:334] "Generic (PLEG): container finished" podID="03699fa5-87c3-42b4-907b-586fa9d208af" containerID="685feecd6eaaece16ff8456bc67ddc2bf170e620ad33736fe1c4a7ee80e38f46" exitCode=0 Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.746039 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5ctps" event={"ID":"03699fa5-87c3-42b4-907b-586fa9d208af","Type":"ContainerDied","Data":"685feecd6eaaece16ff8456bc67ddc2bf170e620ad33736fe1c4a7ee80e38f46"} Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.748244 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mr25n" event={"ID":"5ecba8db-afd9-4f76-b5f1-61acfb49bd68","Type":"ContainerDied","Data":"1b87b05e98442937239653203ad84ca6db24618a26106756a989d99a7a90b29c"} Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.748277 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b87b05e98442937239653203ad84ca6db24618a26106756a989d99a7a90b29c" Jan 30 11:12:58 crc kubenswrapper[4869]: I0130 11:12:58.748319 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-mr25n" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.736801 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-66466f9898-mzt77"] Jan 30 11:12:59 crc kubenswrapper[4869]: E0130 11:12:59.737480 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerName="dnsmasq-dns" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.737492 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerName="dnsmasq-dns" Jan 30 11:12:59 crc kubenswrapper[4869]: E0130 11:12:59.737510 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ecba8db-afd9-4f76-b5f1-61acfb49bd68" containerName="barbican-db-sync" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.737518 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ecba8db-afd9-4f76-b5f1-61acfb49bd68" containerName="barbican-db-sync" Jan 30 11:12:59 crc kubenswrapper[4869]: E0130 11:12:59.737542 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerName="init" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.737548 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerName="init" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.737731 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ecba8db-afd9-4f76-b5f1-61acfb49bd68" containerName="barbican-db-sync" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.737755 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6428ca59-598a-417c-a4b0-5f1cabe400cf" containerName="dnsmasq-dns" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.738677 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.741080 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.741301 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.741423 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-fnx6m" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.750935 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-65d8584cdc-bgnk8"] Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.752639 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.754848 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.765465 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-66466f9898-mzt77"] Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.771857 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-central-agent" containerID="cri-o://653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb" gracePeriod=30 Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.773603 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerStarted","Data":"6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22"} Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.777832 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.774396 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="proxy-httpd" containerID="cri-o://6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22" gracePeriod=30 Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.774406 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-notification-agent" containerID="cri-o://ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9" gracePeriod=30 Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.774369 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="sg-core" containerID="cri-o://9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e" gracePeriod=30 Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.778520 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65d8584cdc-bgnk8"] Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.859578 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-6hhjj"] Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.861095 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.870088 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.837735217 podStartE2EDuration="43.870061769s" podCreationTimestamp="2026-01-30 11:12:16 +0000 UTC" firstStartedPulling="2026-01-30 11:12:18.8019862 +0000 UTC m=+1089.351862256" lastFinishedPulling="2026-01-30 11:12:58.834312742 +0000 UTC m=+1129.384188808" observedRunningTime="2026-01-30 11:12:59.851888463 +0000 UTC m=+1130.401764529" watchObservedRunningTime="2026-01-30 11:12:59.870061769 +0000 UTC m=+1130.419937835" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.896995 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-6hhjj"] Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916134 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data-custom\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916200 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/104ca851-1c21-41bd-8a92-423fdab83753-logs\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916260 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data-custom\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916288 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8gbk\" (UniqueName: \"kubernetes.io/projected/45d8f6aa-887f-444b-81c8-7bf6c03993c9-kube-api-access-t8gbk\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916327 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-combined-ca-bundle\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45d8f6aa-887f-444b-81c8-7bf6c03993c9-logs\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916387 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916434 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-combined-ca-bundle\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916468 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:12:59 crc kubenswrapper[4869]: I0130 11:12:59.916499 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gwkm\" (UniqueName: \"kubernetes.io/projected/104ca851-1c21-41bd-8a92-423fdab83753-kube-api-access-2gwkm\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.017928 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-combined-ca-bundle\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.017990 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018014 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018051 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018074 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gwkm\" (UniqueName: \"kubernetes.io/projected/104ca851-1c21-41bd-8a92-423fdab83753-kube-api-access-2gwkm\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: 
\"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018096 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data-custom\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018124 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018144 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/104ca851-1c21-41bd-8a92-423fdab83753-logs\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018166 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8gbk\" (UniqueName: \"kubernetes.io/projected/45d8f6aa-887f-444b-81c8-7bf6c03993c9-kube-api-access-t8gbk\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data-custom\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018216 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-combined-ca-bundle\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018237 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018264 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45d8f6aa-887f-444b-81c8-7bf6c03993c9-logs\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018286 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-config\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018304 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.018322 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stjml\" (UniqueName: \"kubernetes.io/projected/daad33e6-c765-4fcb-94e2-9ff5f244cffd-kube-api-access-stjml\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.027447 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45d8f6aa-887f-444b-81c8-7bf6c03993c9-logs\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.028870 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.029356 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-combined-ca-bundle\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.029594 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data-custom\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.029913 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/104ca851-1c21-41bd-8a92-423fdab83753-logs\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.031150 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.041827 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-combined-ca-bundle\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.045273 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data-custom\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.075487 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8gbk\" (UniqueName: \"kubernetes.io/projected/45d8f6aa-887f-444b-81c8-7bf6c03993c9-kube-api-access-t8gbk\") pod \"barbican-keystone-listener-66466f9898-mzt77\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") " pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.097312 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gwkm\" (UniqueName: \"kubernetes.io/projected/104ca851-1c21-41bd-8a92-423fdab83753-kube-api-access-2gwkm\") pod \"barbican-worker-65d8584cdc-bgnk8\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") " pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.120787 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6444c94f66-mzq6m"] Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.122383 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127461 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127494 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfs9n\" (UniqueName: \"kubernetes.io/projected/d1567b59-87d2-4eea-925a-d1b9d8a27e24-kube-api-access-jfs9n\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127521 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-config\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127538 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stjml\" 
(UniqueName: \"kubernetes.io/projected/daad33e6-c765-4fcb-94e2-9ff5f244cffd-kube-api-access-stjml\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127565 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-combined-ca-bundle\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127596 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1567b59-87d2-4eea-925a-d1b9d8a27e24-logs\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127625 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127642 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data-custom\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127676 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.127692 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.128588 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.128664 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.130526 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-config\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.131582 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.140271 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.145904 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.194831 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6444c94f66-mzq6m"] Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.200606 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stjml\" (UniqueName: \"kubernetes.io/projected/daad33e6-c765-4fcb-94e2-9ff5f244cffd-kube-api-access-stjml\") pod \"dnsmasq-dns-848cf88cfc-6hhjj\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.231742 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-combined-ca-bundle\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.231830 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1567b59-87d2-4eea-925a-d1b9d8a27e24-logs\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.231886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data-custom\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.231952 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.232043 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfs9n\" (UniqueName: \"kubernetes.io/projected/d1567b59-87d2-4eea-925a-d1b9d8a27e24-kube-api-access-jfs9n\") pod \"barbican-api-6444c94f66-mzq6m\" 
(UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.236187 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data-custom\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.236743 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1567b59-87d2-4eea-925a-d1b9d8a27e24-logs\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.240742 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-combined-ca-bundle\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.242579 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.255357 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfs9n\" (UniqueName: \"kubernetes.io/projected/d1567b59-87d2-4eea-925a-d1b9d8a27e24-kube-api-access-jfs9n\") pod \"barbican-api-6444c94f66-mzq6m\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") " pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.268111 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.283167 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5ctps" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.364442 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.395183 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.435539 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxhwv\" (UniqueName: \"kubernetes.io/projected/03699fa5-87c3-42b4-907b-586fa9d208af-kube-api-access-dxhwv\") pod \"03699fa5-87c3-42b4-907b-586fa9d208af\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.435642 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-scripts\") pod \"03699fa5-87c3-42b4-907b-586fa9d208af\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.436151 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-db-sync-config-data\") pod \"03699fa5-87c3-42b4-907b-586fa9d208af\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.436172 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-config-data\") pod \"03699fa5-87c3-42b4-907b-586fa9d208af\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.436190 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03699fa5-87c3-42b4-907b-586fa9d208af-etc-machine-id\") pod \"03699fa5-87c3-42b4-907b-586fa9d208af\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.436251 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-combined-ca-bundle\") pod \"03699fa5-87c3-42b4-907b-586fa9d208af\" (UID: \"03699fa5-87c3-42b4-907b-586fa9d208af\") " Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.439575 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03699fa5-87c3-42b4-907b-586fa9d208af-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "03699fa5-87c3-42b4-907b-586fa9d208af" (UID: "03699fa5-87c3-42b4-907b-586fa9d208af"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.447782 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-scripts" (OuterVolumeSpecName: "scripts") pod "03699fa5-87c3-42b4-907b-586fa9d208af" (UID: "03699fa5-87c3-42b4-907b-586fa9d208af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.460155 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "03699fa5-87c3-42b4-907b-586fa9d208af" (UID: "03699fa5-87c3-42b4-907b-586fa9d208af"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.460183 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03699fa5-87c3-42b4-907b-586fa9d208af-kube-api-access-dxhwv" (OuterVolumeSpecName: "kube-api-access-dxhwv") pod "03699fa5-87c3-42b4-907b-586fa9d208af" (UID: "03699fa5-87c3-42b4-907b-586fa9d208af"). InnerVolumeSpecName "kube-api-access-dxhwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.473438 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.501760 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03699fa5-87c3-42b4-907b-586fa9d208af" (UID: "03699fa5-87c3-42b4-907b-586fa9d208af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.538325 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-config-data" (OuterVolumeSpecName: "config-data") pod "03699fa5-87c3-42b4-907b-586fa9d208af" (UID: "03699fa5-87c3-42b4-907b-586fa9d208af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.546056 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.546090 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxhwv\" (UniqueName: \"kubernetes.io/projected/03699fa5-87c3-42b4-907b-586fa9d208af-kube-api-access-dxhwv\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.546100 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.546109 4869 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.546118 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03699fa5-87c3-42b4-907b-586fa9d208af-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.546129 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03699fa5-87c3-42b4-907b-586fa9d208af-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.818037 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-6hhjj"] Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.818341 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5ctps" 
event={"ID":"03699fa5-87c3-42b4-907b-586fa9d208af","Type":"ContainerDied","Data":"30a1f78919c5f2985046d4aedc542ed19f05c1b985bd0938356d82396a65b898"} Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.818387 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30a1f78919c5f2985046d4aedc542ed19f05c1b985bd0938356d82396a65b898" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.818416 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5ctps" Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.826064 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerDied","Data":"6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22"} Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.824877 4869 generic.go:334] "Generic (PLEG): container finished" podID="46a44949-0829-489c-8baf-31966a61641f" containerID="6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22" exitCode=0 Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.826148 4869 generic.go:334] "Generic (PLEG): container finished" podID="46a44949-0829-489c-8baf-31966a61641f" containerID="9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e" exitCode=2 Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.826161 4869 generic.go:334] "Generic (PLEG): container finished" podID="46a44949-0829-489c-8baf-31966a61641f" containerID="653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb" exitCode=0 Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.826231 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerDied","Data":"9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e"} Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.826261 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerDied","Data":"653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb"} Jan 30 11:13:00 crc kubenswrapper[4869]: I0130 11:13:00.961219 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-66466f9898-mzt77"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.026686 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:01 crc kubenswrapper[4869]: E0130 11:13:01.027485 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03699fa5-87c3-42b4-907b-586fa9d208af" containerName="cinder-db-sync" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.027505 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="03699fa5-87c3-42b4-907b-586fa9d208af" containerName="cinder-db-sync" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.028656 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="03699fa5-87c3-42b4-907b-586fa9d208af" containerName="cinder-db-sync" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.030698 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.037149 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-8hjgx" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.037215 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.037281 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.037351 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.066627 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.110939 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65d8584cdc-bgnk8"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.128912 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-6hhjj"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.156380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.156498 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t659s\" (UniqueName: \"kubernetes.io/projected/9daa784c-29c0-4086-a318-a9c4b73a6244-kube-api-access-t659s\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.156537 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.156613 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-scripts\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.156659 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9daa784c-29c0-4086-a318-a9c4b73a6244-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.156695 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 
11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.179591 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6444c94f66-mzq6m"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.197627 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-gxkk6"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.202345 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.203078 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-gxkk6"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.260216 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.260899 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t659s\" (UniqueName: \"kubernetes.io/projected/9daa784c-29c0-4086-a318-a9c4b73a6244-kube-api-access-t659s\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.260989 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.261135 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-scripts\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.261263 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9daa784c-29c0-4086-a318-a9c4b73a6244-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.261442 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.263673 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9daa784c-29c0-4086-a318-a9c4b73a6244-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.273890 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-scripts\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " 
pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.274646 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.281447 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t659s\" (UniqueName: \"kubernetes.io/projected/9daa784c-29c0-4086-a318-a9c4b73a6244-kube-api-access-t659s\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.288793 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.294559 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data\") pod \"cinder-scheduler-0\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.363765 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-svc\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.363888 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.363949 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.364102 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.364140 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqzdh\" (UniqueName: \"kubernetes.io/projected/c07567e1-764d-4544-8a1d-ae6826672ae1-kube-api-access-xqzdh\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 
11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.364184 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-config\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.368472 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.384658 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.386206 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.432134 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.438335 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466369 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466494 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466529 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466548 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqzdh\" (UniqueName: \"kubernetes.io/projected/c07567e1-764d-4544-8a1d-ae6826672ae1-kube-api-access-xqzdh\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466591 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-config\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466681 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e2c1d6-848c-419d-964d-a9ba0bb41313-logs\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466736 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-svc\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466819 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-scripts\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466854 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466897 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e2c1d6-848c-419d-964d-a9ba0bb41313-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466939 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.466973 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.467443 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcwc9\" (UniqueName: \"kubernetes.io/projected/f1e2c1d6-848c-419d-964d-a9ba0bb41313-kube-api-access-pcwc9\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.468482 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.469081 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.469151 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.469621 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-svc\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.471146 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-config\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.484779 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqzdh\" (UniqueName: \"kubernetes.io/projected/c07567e1-764d-4544-8a1d-ae6826672ae1-kube-api-access-xqzdh\") pod \"dnsmasq-dns-6578955fd5-gxkk6\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.531155 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.573305 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.573634 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcwc9\" (UniqueName: \"kubernetes.io/projected/f1e2c1d6-848c-419d-964d-a9ba0bb41313-kube-api-access-pcwc9\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.573670 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.573740 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.573821 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e2c1d6-848c-419d-964d-a9ba0bb41313-logs\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.573899 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-scripts\") pod 
\"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.573953 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e2c1d6-848c-419d-964d-a9ba0bb41313-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.574069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e2c1d6-848c-419d-964d-a9ba0bb41313-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.574467 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e2c1d6-848c-419d-964d-a9ba0bb41313-logs\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.579174 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.580085 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-scripts\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.581016 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.585501 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.595843 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcwc9\" (UniqueName: \"kubernetes.io/projected/f1e2c1d6-848c-419d-964d-a9ba0bb41313-kube-api-access-pcwc9\") pod \"cinder-api-0\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") " pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.768661 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.835942 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" event={"ID":"45d8f6aa-887f-444b-81c8-7bf6c03993c9","Type":"ContainerStarted","Data":"7eff2f1e87b8492e8940c53d5afc8748f86124ec76ec391c6784cc81c39859a8"} Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.840194 4869 generic.go:334] "Generic (PLEG): container finished" podID="daad33e6-c765-4fcb-94e2-9ff5f244cffd" containerID="20a5ae9eaf2b66f2f1df0666ff21ba3418d92dddb85973fa770a763f5ec11565" exitCode=0 Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.840313 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" event={"ID":"daad33e6-c765-4fcb-94e2-9ff5f244cffd","Type":"ContainerDied","Data":"20a5ae9eaf2b66f2f1df0666ff21ba3418d92dddb85973fa770a763f5ec11565"} Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.840354 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" event={"ID":"daad33e6-c765-4fcb-94e2-9ff5f244cffd","Type":"ContainerStarted","Data":"dd5383d8997fb9e22f6b38797bb82699ca5fc978f27c2f3610c0d7a06b2894be"} Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.867194 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6444c94f66-mzq6m" event={"ID":"d1567b59-87d2-4eea-925a-d1b9d8a27e24","Type":"ContainerStarted","Data":"d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362"} Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.867252 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6444c94f66-mzq6m" event={"ID":"d1567b59-87d2-4eea-925a-d1b9d8a27e24","Type":"ContainerStarted","Data":"e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a"} Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.867267 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6444c94f66-mzq6m" event={"ID":"d1567b59-87d2-4eea-925a-d1b9d8a27e24","Type":"ContainerStarted","Data":"0cdd2a878674f28a4955f6ff1bb8d955b8cc1387febf79fc2dbe657dbd52626a"} Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.867626 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.867656 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.873625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65d8584cdc-bgnk8" event={"ID":"104ca851-1c21-41bd-8a92-423fdab83753","Type":"ContainerStarted","Data":"370b9adeeba2894047536a2b4dc7374948fe222b74d0629f4f697dc3e699ff8f"} Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.901511 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6444c94f66-mzq6m" podStartSLOduration=1.901487545 podStartE2EDuration="1.901487545s" podCreationTimestamp="2026-01-30 11:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:01.89495326 +0000 UTC m=+1132.444829326" watchObservedRunningTime="2026-01-30 11:13:01.901487545 +0000 UTC m=+1132.451363611" Jan 30 11:13:01 crc kubenswrapper[4869]: I0130 11:13:01.937542 4869 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.077172 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-gxkk6"] Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.298821 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.390835 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-nb\") pod \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.390959 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-swift-storage-0\") pod \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.390991 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-svc\") pod \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.391089 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stjml\" (UniqueName: \"kubernetes.io/projected/daad33e6-c765-4fcb-94e2-9ff5f244cffd-kube-api-access-stjml\") pod \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.391172 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-config\") pod \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.391192 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-sb\") pod \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\" (UID: \"daad33e6-c765-4fcb-94e2-9ff5f244cffd\") " Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.427984 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daad33e6-c765-4fcb-94e2-9ff5f244cffd-kube-api-access-stjml" (OuterVolumeSpecName: "kube-api-access-stjml") pod "daad33e6-c765-4fcb-94e2-9ff5f244cffd" (UID: "daad33e6-c765-4fcb-94e2-9ff5f244cffd"). InnerVolumeSpecName "kube-api-access-stjml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.446946 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "daad33e6-c765-4fcb-94e2-9ff5f244cffd" (UID: "daad33e6-c765-4fcb-94e2-9ff5f244cffd"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.459262 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "daad33e6-c765-4fcb-94e2-9ff5f244cffd" (UID: "daad33e6-c765-4fcb-94e2-9ff5f244cffd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.482582 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.491870 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "daad33e6-c765-4fcb-94e2-9ff5f244cffd" (UID: "daad33e6-c765-4fcb-94e2-9ff5f244cffd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.498081 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "daad33e6-c765-4fcb-94e2-9ff5f244cffd" (UID: "daad33e6-c765-4fcb-94e2-9ff5f244cffd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.500861 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.500890 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.500903 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.500915 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stjml\" (UniqueName: \"kubernetes.io/projected/daad33e6-c765-4fcb-94e2-9ff5f244cffd-kube-api-access-stjml\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.500925 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.521160 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-config" (OuterVolumeSpecName: "config") pod "daad33e6-c765-4fcb-94e2-9ff5f244cffd" (UID: "daad33e6-c765-4fcb-94e2-9ff5f244cffd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.603668 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daad33e6-c765-4fcb-94e2-9ff5f244cffd-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.883482 4869 generic.go:334] "Generic (PLEG): container finished" podID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerID="de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375" exitCode=0 Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.883566 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" event={"ID":"c07567e1-764d-4544-8a1d-ae6826672ae1","Type":"ContainerDied","Data":"de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375"} Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.883649 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" event={"ID":"c07567e1-764d-4544-8a1d-ae6826672ae1","Type":"ContainerStarted","Data":"33a22f4df738bdd79eb6fb93de8e8f205945c14ae368919b68f268597ecda891"} Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.888194 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" event={"ID":"daad33e6-c765-4fcb-94e2-9ff5f244cffd","Type":"ContainerDied","Data":"dd5383d8997fb9e22f6b38797bb82699ca5fc978f27c2f3610c0d7a06b2894be"} Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.888264 4869 scope.go:117] "RemoveContainer" containerID="20a5ae9eaf2b66f2f1df0666ff21ba3418d92dddb85973fa770a763f5ec11565" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.888403 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-6hhjj" Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.891855 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9daa784c-29c0-4086-a318-a9c4b73a6244","Type":"ContainerStarted","Data":"72b996ccf7537939fb0250606855c08a21d1eda8b6dffc1cbb839601c43c86aa"} Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.962829 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-6hhjj"] Jan 30 11:13:02 crc kubenswrapper[4869]: I0130 11:13:02.983790 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-6hhjj"] Jan 30 11:13:02 crc kubenswrapper[4869]: W0130 11:13:02.990045 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1e2c1d6_848c_419d_964d_a9ba0bb41313.slice/crio-35a7e9734d7e8f54d154a0f38e3786f38ce6299ff9eeb46d41ff1de18d98dcd7 WatchSource:0}: Error finding container 35a7e9734d7e8f54d154a0f38e3786f38ce6299ff9eeb46d41ff1de18d98dcd7: Status 404 returned error can't find the container with id 35a7e9734d7e8f54d154a0f38e3786f38ce6299ff9eeb46d41ff1de18d98dcd7 Jan 30 11:13:03 crc kubenswrapper[4869]: I0130 11:13:03.929201 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e2c1d6-848c-419d-964d-a9ba0bb41313","Type":"ContainerStarted","Data":"35a7e9734d7e8f54d154a0f38e3786f38ce6299ff9eeb46d41ff1de18d98dcd7"} Jan 30 11:13:03 crc kubenswrapper[4869]: I0130 11:13:03.934900 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:03 crc kubenswrapper[4869]: I0130 11:13:03.964964 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" podStartSLOduration=2.964933512 podStartE2EDuration="2.964933512s" podCreationTimestamp="2026-01-30 11:13:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:03.958060786 +0000 UTC m=+1134.507936852" watchObservedRunningTime="2026-01-30 11:13:03.964933512 +0000 UTC m=+1134.514809588" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.144296 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daad33e6-c765-4fcb-94e2-9ff5f244cffd" path="/var/lib/kubelet/pods/daad33e6-c765-4fcb-94e2-9ff5f244cffd/volumes" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.619700 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.654384 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-combined-ca-bundle\") pod \"46a44949-0829-489c-8baf-31966a61641f\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.654460 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54jm7\" (UniqueName: \"kubernetes.io/projected/46a44949-0829-489c-8baf-31966a61641f-kube-api-access-54jm7\") pod \"46a44949-0829-489c-8baf-31966a61641f\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.654517 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-run-httpd\") pod \"46a44949-0829-489c-8baf-31966a61641f\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.654635 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-scripts\") pod \"46a44949-0829-489c-8baf-31966a61641f\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.654690 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-log-httpd\") pod \"46a44949-0829-489c-8baf-31966a61641f\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.654774 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-sg-core-conf-yaml\") pod \"46a44949-0829-489c-8baf-31966a61641f\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.654842 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-config-data\") pod \"46a44949-0829-489c-8baf-31966a61641f\" (UID: \"46a44949-0829-489c-8baf-31966a61641f\") " Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.655197 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "46a44949-0829-489c-8baf-31966a61641f" (UID: "46a44949-0829-489c-8baf-31966a61641f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.655499 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.655632 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "46a44949-0829-489c-8baf-31966a61641f" (UID: "46a44949-0829-489c-8baf-31966a61641f"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.667573 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-scripts" (OuterVolumeSpecName: "scripts") pod "46a44949-0829-489c-8baf-31966a61641f" (UID: "46a44949-0829-489c-8baf-31966a61641f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.677500 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46a44949-0829-489c-8baf-31966a61641f-kube-api-access-54jm7" (OuterVolumeSpecName: "kube-api-access-54jm7") pod "46a44949-0829-489c-8baf-31966a61641f" (UID: "46a44949-0829-489c-8baf-31966a61641f"). InnerVolumeSpecName "kube-api-access-54jm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.709832 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "46a44949-0829-489c-8baf-31966a61641f" (UID: "46a44949-0829-489c-8baf-31966a61641f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.761630 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54jm7\" (UniqueName: \"kubernetes.io/projected/46a44949-0829-489c-8baf-31966a61641f-kube-api-access-54jm7\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.761670 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.761699 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46a44949-0829-489c-8baf-31966a61641f-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.761732 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.762878 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46a44949-0829-489c-8baf-31966a61641f" (UID: "46a44949-0829-489c-8baf-31966a61641f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.798284 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-config-data" (OuterVolumeSpecName: "config-data") pod "46a44949-0829-489c-8baf-31966a61641f" (UID: "46a44949-0829-489c-8baf-31966a61641f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.863467 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.863517 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46a44949-0829-489c-8baf-31966a61641f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.958345 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9daa784c-29c0-4086-a318-a9c4b73a6244","Type":"ContainerStarted","Data":"f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f"} Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.961539 4869 generic.go:334] "Generic (PLEG): container finished" podID="46a44949-0829-489c-8baf-31966a61641f" containerID="ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9" exitCode=0 Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.961611 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerDied","Data":"ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9"} Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.961613 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.961652 4869 scope.go:117] "RemoveContainer" containerID="6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22" Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.961640 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46a44949-0829-489c-8baf-31966a61641f","Type":"ContainerDied","Data":"60a185752ceaea7bccb9614da6fe38adbc410d356eb5cf9ab9f868b9e633d7a3"} Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.963922 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e2c1d6-848c-419d-964d-a9ba0bb41313","Type":"ContainerStarted","Data":"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"} Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.968935 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" event={"ID":"c07567e1-764d-4544-8a1d-ae6826672ae1","Type":"ContainerStarted","Data":"ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814"} Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.975171 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65d8584cdc-bgnk8" event={"ID":"104ca851-1c21-41bd-8a92-423fdab83753","Type":"ContainerStarted","Data":"c1377e5a89b886398734afab4df613f251b21e463da188354d9a29304432a1e2"} Jan 30 11:13:04 crc kubenswrapper[4869]: I0130 11:13:04.975832 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65d8584cdc-bgnk8" event={"ID":"104ca851-1c21-41bd-8a92-423fdab83753","Type":"ContainerStarted","Data":"21d82ca792ac006fe155b7cbdd156ead01161c135d9e86487b0d642dfa345111"} Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.009625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" 
event={"ID":"45d8f6aa-887f-444b-81c8-7bf6c03993c9","Type":"ContainerStarted","Data":"e9752fd0d18f235bdd601cdc37759bd12b8f72d28d609ba99e7c988552e2f109"} Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.009693 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" event={"ID":"45d8f6aa-887f-444b-81c8-7bf6c03993c9","Type":"ContainerStarted","Data":"c09c667d7f6ac623e362575529ea4aab1bd220f1c6756e69b8ca1ef9977354ae"} Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.045700 4869 scope.go:117] "RemoveContainer" containerID="9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.056568 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-65d8584cdc-bgnk8" podStartSLOduration=3.6664373059999997 podStartE2EDuration="6.056541403s" podCreationTimestamp="2026-01-30 11:12:59 +0000 UTC" firstStartedPulling="2026-01-30 11:13:01.0891701 +0000 UTC m=+1131.639046176" lastFinishedPulling="2026-01-30 11:13:03.479274197 +0000 UTC m=+1134.029150273" observedRunningTime="2026-01-30 11:13:05.03317785 +0000 UTC m=+1135.583053916" watchObservedRunningTime="2026-01-30 11:13:05.056541403 +0000 UTC m=+1135.606417469" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.066703 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" podStartSLOduration=3.573187179 podStartE2EDuration="6.066686311s" podCreationTimestamp="2026-01-30 11:12:59 +0000 UTC" firstStartedPulling="2026-01-30 11:13:00.985777765 +0000 UTC m=+1131.535653831" lastFinishedPulling="2026-01-30 11:13:03.479276897 +0000 UTC m=+1134.029152963" observedRunningTime="2026-01-30 11:13:05.064212081 +0000 UTC m=+1135.614088157" watchObservedRunningTime="2026-01-30 11:13:05.066686311 +0000 UTC m=+1135.616562377" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.113376 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.117959 4869 scope.go:117] "RemoveContainer" containerID="ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.133114 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.147261 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.147771 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-notification-agent" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.147793 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-notification-agent" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.147818 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-central-agent" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.147826 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-central-agent" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.147846 4869 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="sg-core" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.147853 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="sg-core" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.147868 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daad33e6-c765-4fcb-94e2-9ff5f244cffd" containerName="init" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.147876 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="daad33e6-c765-4fcb-94e2-9ff5f244cffd" containerName="init" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.147897 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="proxy-httpd" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.147905 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="proxy-httpd" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.148153 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="proxy-httpd" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.148180 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-notification-agent" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.148195 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="sg-core" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.148213 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="daad33e6-c765-4fcb-94e2-9ff5f244cffd" containerName="init" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.148225 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="46a44949-0829-489c-8baf-31966a61641f" containerName="ceilometer-central-agent" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.150349 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.153796 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.154846 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.157505 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.173297 4869 scope.go:117] "RemoveContainer" containerID="653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.233473 4869 scope.go:117] "RemoveContainer" containerID="6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.235007 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22\": container with ID starting with 6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22 not found: ID does not exist" containerID="6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.235041 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22"} err="failed to get container status \"6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22\": rpc error: code = NotFound desc = could not find container \"6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22\": container with ID starting with 6575c82b56ef2a8f339dd3823558a1701e499dd058b41adb6d86d82b00df3e22 not found: ID does not exist" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.235064 4869 scope.go:117] "RemoveContainer" containerID="9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.235483 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e\": container with ID starting with 9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e not found: ID does not exist" containerID="9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.235517 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e"} err="failed to get container status \"9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e\": rpc error: code = NotFound desc = could not find container \"9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e\": container with ID starting with 9ff07e90770d898c4b45efedf3a39d283ddced6f78bc7b59dd02d8279f528a0e not found: ID does not exist" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.235537 4869 scope.go:117] "RemoveContainer" containerID="ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.235823 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9\": container with ID starting with ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9 not found: ID does not exist" containerID="ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.235843 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9"} err="failed to get container status \"ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9\": rpc error: code = NotFound desc = could not find container \"ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9\": container with ID starting with ef2be8647a33a563b522128ed9461507b42ca0e66e663c90aec39048781728d9 not found: ID does not exist" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.235856 4869 scope.go:117] "RemoveContainer" containerID="653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb" Jan 30 11:13:05 crc kubenswrapper[4869]: E0130 11:13:05.236060 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb\": container with ID starting with 653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb not found: ID does not exist" containerID="653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.236081 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb"} err="failed to get container status \"653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb\": rpc error: code = NotFound desc = could not find container \"653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb\": container with ID starting with 653553e1c2623f2d21168f37e3a4fd911742e0cfccadfdd017b3ae36afa31bdb not found: ID does not exist" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.272441 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.272519 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-run-httpd\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.272545 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-config-data\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.272580 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-scripts\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " 
pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.272630 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxs5m\" (UniqueName: \"kubernetes.io/projected/5c96d522-04a0-49df-a9a8-dc050f71c013-kube-api-access-nxs5m\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.272664 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.272694 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-log-httpd\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.322213 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.374466 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxs5m\" (UniqueName: \"kubernetes.io/projected/5c96d522-04a0-49df-a9a8-dc050f71c013-kube-api-access-nxs5m\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.374518 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.374545 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-log-httpd\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.374678 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.374725 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-run-httpd\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.374747 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-config-data\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.374776 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-scripts\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.375920 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-log-httpd\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.377122 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-run-httpd\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.380408 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.382753 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.383655 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-scripts\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.384956 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-config-data\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.396543 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxs5m\" (UniqueName: \"kubernetes.io/projected/5c96d522-04a0-49df-a9a8-dc050f71c013-kube-api-access-nxs5m\") pod \"ceilometer-0\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") " pod="openstack/ceilometer-0" Jan 30 11:13:05 crc kubenswrapper[4869]: I0130 11:13:05.515429 4869 util.go:30] "No sandbox for pod can be found. 
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.050935 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9daa784c-29c0-4086-a318-a9c4b73a6244","Type":"ContainerStarted","Data":"b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc"}
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.072971 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e2c1d6-848c-419d-964d-a9ba0bb41313","Type":"ContainerStarted","Data":"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"}
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.073016 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.098443 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.488666635 podStartE2EDuration="6.098424735s" podCreationTimestamp="2026-01-30 11:13:00 +0000 UTC" firstStartedPulling="2026-01-30 11:13:01.954441308 +0000 UTC m=+1132.504317374" lastFinishedPulling="2026-01-30 11:13:03.564199408 +0000 UTC m=+1134.114075474" observedRunningTime="2026-01-30 11:13:06.085949231 +0000 UTC m=+1136.635825317" watchObservedRunningTime="2026-01-30 11:13:06.098424735 +0000 UTC m=+1136.648300801"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.107901 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.121986 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.121962893 podStartE2EDuration="5.121962893s" podCreationTimestamp="2026-01-30 11:13:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:06.119221785 +0000 UTC m=+1136.669097861" watchObservedRunningTime="2026-01-30 11:13:06.121962893 +0000 UTC m=+1136.671838959"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.245682 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46a44949-0829-489c-8baf-31966a61641f" path="/var/lib/kubelet/pods/46a44949-0829-489c-8baf-31966a61641f/volumes"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.371911 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.661032 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-9fb998c86-5qb5j"]
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.662616 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.664909 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.672118 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.693873 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-9fb998c86-5qb5j"]
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.716277 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-internal-tls-certs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.716337 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-public-tls-certs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.716373 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data-custom\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.716397 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74632136-6311-4daa-80c7-4c32c20d6a4a-logs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.716446 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-combined-ca-bundle\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.716469 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tknc\" (UniqueName: \"kubernetes.io/projected/74632136-6311-4daa-80c7-4c32c20d6a4a-kube-api-access-4tknc\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.716526 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.818513 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j"
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.818647 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-internal-tls-certs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.818677 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-public-tls-certs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.818735 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data-custom\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.818765 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74632136-6311-4daa-80c7-4c32c20d6a4a-logs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.818789 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-combined-ca-bundle\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.818817 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tknc\" (UniqueName: \"kubernetes.io/projected/74632136-6311-4daa-80c7-4c32c20d6a4a-kube-api-access-4tknc\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.822201 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74632136-6311-4daa-80c7-4c32c20d6a4a-logs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.824655 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data-custom\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.825100 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.826149 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-public-tls-certs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.826470 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-combined-ca-bundle\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.841116 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-internal-tls-certs\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.843789 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tknc\" (UniqueName: \"kubernetes.io/projected/74632136-6311-4daa-80c7-4c32c20d6a4a-kube-api-access-4tknc\") pod \"barbican-api-9fb998c86-5qb5j\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:06 crc kubenswrapper[4869]: I0130 11:13:06.992754 4869 util.go:30] "No sandbox for pod can be found. 
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.084598 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerStarted","Data":"a2755119c1c666678412543710423174a385aa2323b32ce68990ba2fe8b39c85"}
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.084920 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api-log" containerID="cri-o://12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070" gracePeriod=30
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.085051 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api" containerID="cri-o://e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50" gracePeriod=30
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.503134 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-9fb998c86-5qb5j"]
Jan 30 11:13:07 crc kubenswrapper[4869]: W0130 11:13:07.527953 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74632136_6311_4daa_80c7_4c32c20d6a4a.slice/crio-35003dedbc247aed0f9c674563b373d14134f54452e5ef2576060b1927a3c1e7 WatchSource:0}: Error finding container 35003dedbc247aed0f9c674563b373d14134f54452e5ef2576060b1927a3c1e7: Status 404 returned error can't find the container with id 35003dedbc247aed0f9c674563b373d14134f54452e5ef2576060b1927a3c1e7
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.915690 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.990060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e2c1d6-848c-419d-964d-a9ba0bb41313-etc-machine-id\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.990450 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.990483 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data-custom\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.990532 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-scripts\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.990562 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcwc9\" (UniqueName: \"kubernetes.io/projected/f1e2c1d6-848c-419d-964d-a9ba0bb41313-kube-api-access-pcwc9\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.990676 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e2c1d6-848c-419d-964d-a9ba0bb41313-logs\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.990701 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.991531 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1e2c1d6-848c-419d-964d-a9ba0bb41313-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 30 11:13:07 crc kubenswrapper[4869]: I0130 11:13:07.995336 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1e2c1d6-848c-419d-964d-a9ba0bb41313-logs" (OuterVolumeSpecName: "logs") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.003964 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-scripts" (OuterVolumeSpecName: "scripts") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.004385 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.011458 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1e2c1d6-848c-419d-964d-a9ba0bb41313-kube-api-access-pcwc9" (OuterVolumeSpecName: "kube-api-access-pcwc9") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313"). InnerVolumeSpecName "kube-api-access-pcwc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:08 crc kubenswrapper[4869]: E0130 11:13:08.067211 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle podName:f1e2c1d6-848c-419d-964d-a9ba0bb41313 nodeName:}" failed. No retries permitted until 2026-01-30 11:13:08.566914774 +0000 UTC m=+1139.116790840 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313") : error deleting /var/lib/kubelet/pods/f1e2c1d6-848c-419d-964d-a9ba0bb41313/volume-subpaths: remove /var/lib/kubelet/pods/f1e2c1d6-848c-419d-964d-a9ba0bb41313/volume-subpaths: no such file or directory Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.071271 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data" (OuterVolumeSpecName: "config-data") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.092593 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e2c1d6-848c-419d-964d-a9ba0bb41313-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.092629 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.092639 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.092648 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcwc9\" (UniqueName: \"kubernetes.io/projected/f1e2c1d6-848c-419d-964d-a9ba0bb41313-kube-api-access-pcwc9\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.092658 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e2c1d6-848c-419d-964d-a9ba0bb41313-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.092667 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.104527 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-9fb998c86-5qb5j" event={"ID":"74632136-6311-4daa-80c7-4c32c20d6a4a","Type":"ContainerStarted","Data":"5c3a077c91d9559f388be79a584f5f0cc2987551bdedbe1808365a487d0f5ea3"} Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.104596 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-9fb998c86-5qb5j" event={"ID":"74632136-6311-4daa-80c7-4c32c20d6a4a","Type":"ContainerStarted","Data":"35003dedbc247aed0f9c674563b373d14134f54452e5ef2576060b1927a3c1e7"} Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.114845 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerStarted","Data":"09832317b501fab85b36ef81db3ac822f4d1175b57d021d0bdbff1cd40358777"} Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.114898 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerStarted","Data":"c4177dc801127480d4f05284eedb2be6ebfdb2908c611e359348f5b2bac15d52"} Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.121926 4869 generic.go:334] "Generic (PLEG): container finished" podID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerID="e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50" exitCode=0 Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.121953 4869 generic.go:334] "Generic (PLEG): container finished" podID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerID="12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070" exitCode=143 Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.122855 4869 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.123355 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e2c1d6-848c-419d-964d-a9ba0bb41313","Type":"ContainerDied","Data":"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"}
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.123381 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e2c1d6-848c-419d-964d-a9ba0bb41313","Type":"ContainerDied","Data":"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"}
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.123391 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e2c1d6-848c-419d-964d-a9ba0bb41313","Type":"ContainerDied","Data":"35a7e9734d7e8f54d154a0f38e3786f38ce6299ff9eeb46d41ff1de18d98dcd7"}
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.123405 4869 scope.go:117] "RemoveContainer" containerID="e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.174847 4869 scope.go:117] "RemoveContainer" containerID="12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.203669 4869 scope.go:117] "RemoveContainer" containerID="e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"
Jan 30 11:13:08 crc kubenswrapper[4869]: E0130 11:13:08.205000 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50\": container with ID starting with e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50 not found: ID does not exist" containerID="e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.205135 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"} err="failed to get container status \"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50\": rpc error: code = NotFound desc = could not find container \"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50\": container with ID starting with e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50 not found: ID does not exist"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.205261 4869 scope.go:117] "RemoveContainer" containerID="12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"
Jan 30 11:13:08 crc kubenswrapper[4869]: E0130 11:13:08.206005 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070\": container with ID starting with 12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070 not found: ID does not exist" containerID="12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.206120 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"} err="failed to get container status \"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070\": rpc error: code = NotFound desc = could not find container \"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070\": container with ID starting with 12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070 not found: ID does not exist"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.206204 4869 scope.go:117] "RemoveContainer" containerID="e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.208832 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50"} err="failed to get container status \"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50\": rpc error: code = NotFound desc = could not find container \"e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50\": container with ID starting with e5f886f29dfaf9fc37a3be1355fe3de6f91a4a91db39a25108d5179557985d50 not found: ID does not exist"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.208887 4869 scope.go:117] "RemoveContainer" containerID="12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.209634 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070"} err="failed to get container status \"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070\": rpc error: code = NotFound desc = could not find container \"12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070\": container with ID starting with 12b0f5bfe371c51e27d345f1ab4bf0ace9a114d910dae03a894363963b8af070 not found: ID does not exist"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.558133 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-6444c94f66-mzq6m" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.602279 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle\") pod \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\" (UID: \"f1e2c1d6-848c-419d-964d-a9ba0bb41313\") "
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.610885 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1e2c1d6-848c-419d-964d-a9ba0bb41313" (UID: "f1e2c1d6-848c-419d-964d-a9ba0bb41313"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.705435 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e2c1d6-848c-419d-964d-a9ba0bb41313-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.817698 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.826199 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.846456 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:08 crc kubenswrapper[4869]: E0130 11:13:08.846903 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api-log" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.846923 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api-log" Jan 30 11:13:08 crc kubenswrapper[4869]: E0130 11:13:08.846940 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.846946 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.847135 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.847151 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" containerName="cinder-api-log" Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.848103 4869 util.go:30] "No sandbox for pod can be found. 
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.852835 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.853038 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.854824 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Jan 30 11:13:08 crc kubenswrapper[4869]: I0130 11:13:08.859198 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.012840 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-scripts\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013139 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013176 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-public-tls-certs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013196 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjqk4\" (UniqueName: \"kubernetes.io/projected/65d95395-5aea-4546-b12a-ec8ce58ec704-kube-api-access-qjqk4\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013258 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013307 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65d95395-5aea-4546-b12a-ec8ce58ec704-logs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013473 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65d95395-5aea-4546-b12a-ec8ce58ec704-etc-machine-id\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013559 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data-custom\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0"
\"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data-custom\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.013742 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.115656 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65d95395-5aea-4546-b12a-ec8ce58ec704-logs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.116767 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65d95395-5aea-4546-b12a-ec8ce58ec704-etc-machine-id\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.116842 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65d95395-5aea-4546-b12a-ec8ce58ec704-etc-machine-id\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.116247 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65d95395-5aea-4546-b12a-ec8ce58ec704-logs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.117001 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data-custom\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.117134 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.117331 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-scripts\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.117416 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.117542 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-public-tls-certs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.117738 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjqk4\" (UniqueName: \"kubernetes.io/projected/65d95395-5aea-4546-b12a-ec8ce58ec704-kube-api-access-qjqk4\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.118098 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.121332 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-public-tls-certs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.122275 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.122285 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-scripts\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.122808 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.122891 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data-custom\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.132366 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.135510 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjqk4\" (UniqueName: \"kubernetes.io/projected/65d95395-5aea-4546-b12a-ec8ce58ec704-kube-api-access-qjqk4\") pod \"cinder-api-0\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.140493 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerStarted","Data":"8325878f717fad44d46d05bf7a50da4416b33e2e79185d6fc54d42d9a8709030"} Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.144128 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-9fb998c86-5qb5j" event={"ID":"74632136-6311-4daa-80c7-4c32c20d6a4a","Type":"ContainerStarted","Data":"92f17e6ef177f1d7c2a6e4d1a20a973d7e9064773ac8ae9ff622cf49961a940b"} Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.144409 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.144457 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.175732 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.612352 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-9fb998c86-5qb5j" podStartSLOduration=3.612334948 podStartE2EDuration="3.612334948s" podCreationTimestamp="2026-01-30 11:13:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:09.165185126 +0000 UTC m=+1139.715061192" watchObservedRunningTime="2026-01-30 11:13:09.612334948 +0000 UTC m=+1140.162211014" Jan 30 11:13:09 crc kubenswrapper[4869]: I0130 11:13:09.619450 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:13:10 crc kubenswrapper[4869]: I0130 11:13:10.184039 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1e2c1d6-848c-419d-964d-a9ba0bb41313" path="/var/lib/kubelet/pods/f1e2c1d6-848c-419d-964d-a9ba0bb41313/volumes" Jan 30 11:13:10 crc kubenswrapper[4869]: I0130 11:13:10.185268 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65d95395-5aea-4546-b12a-ec8ce58ec704","Type":"ContainerStarted","Data":"285bbd2e68eb2b69460784c677c91f293ed93525c17faea8c5f055f2b074fdbc"} Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.057760 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.216048 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerStarted","Data":"51983a9333b068ad5d372df323d63b6590131ffeece45df7f80a65e2622fa70d"} Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.217662 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.231724 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65d95395-5aea-4546-b12a-ec8ce58ec704","Type":"ContainerStarted","Data":"2d8084a7cade6c321549a9768cbcd158ea761ca605ab57b0333c081b4ad26652"} Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.231766 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65d95395-5aea-4546-b12a-ec8ce58ec704","Type":"ContainerStarted","Data":"f379626d704637c04266aaeddc7b56416c9104be8850dbe4859f262fe2550259"} Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.232547 
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.275368 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.646836989 podStartE2EDuration="6.275325537s" podCreationTimestamp="2026-01-30 11:13:05 +0000 UTC" firstStartedPulling="2026-01-30 11:13:06.139768398 +0000 UTC m=+1136.689644464" lastFinishedPulling="2026-01-30 11:13:10.768256946 +0000 UTC m=+1141.318133012" observedRunningTime="2026-01-30 11:13:11.242925997 +0000 UTC m=+1141.792802073" watchObservedRunningTime="2026-01-30 11:13:11.275325537 +0000 UTC m=+1141.825201603"
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.283456 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.283425007 podStartE2EDuration="3.283425007s" podCreationTimestamp="2026-01-30 11:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:11.266908968 +0000 UTC m=+1141.816785044" watchObservedRunningTime="2026-01-30 11:13:11.283425007 +0000 UTC m=+1141.833301293"
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.340101 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6b94756495-f44c6"]
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.340529 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6b94756495-f44c6" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-api" containerID="cri-o://b5b80d71d4c52800890ddf23dc8a54d41d125816b930b86402cf8924ae35084c" gracePeriod=30
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.341547 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6b94756495-f44c6" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-httpd" containerID="cri-o://9c8a94bbfc90e47a8d2c00ce2fcbb49a27c39f7b15d444037af8085e4789dd3a" gracePeriod=30
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.381869 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-df6cb98f-8s46w"]
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.382870 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6b94756495-f44c6" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.154:9696/\": EOF"
Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.385459 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-df6cb98f-8s46w"
Need to start a new one" pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.395171 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-df6cb98f-8s46w"] Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.487032 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c2wl\" (UniqueName: \"kubernetes.io/projected/484ba6c3-20dc-4b27-b7f5-901eef0643a7-kube-api-access-5c2wl\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.487081 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-internal-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.487110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-ovndb-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.487147 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-combined-ca-bundle\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.487169 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-public-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.487199 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-httpd-config\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.487240 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-config\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.532887 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.588678 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c2wl\" (UniqueName: \"kubernetes.io/projected/484ba6c3-20dc-4b27-b7f5-901eef0643a7-kube-api-access-5c2wl\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " 
pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.588738 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-internal-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.588766 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-ovndb-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.588802 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-combined-ca-bundle\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.588824 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-public-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.588852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-httpd-config\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.588877 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-config\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.598657 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-internal-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.606653 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-ovndb-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.614134 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-2ngc6"] Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.614464 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" podUID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerName="dnsmasq-dns" containerID="cri-o://8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b" gracePeriod=10 Jan 
30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.616863 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-public-tls-certs\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.618926 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-combined-ca-bundle\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.621651 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-config\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.638784 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-httpd-config\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.649126 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c2wl\" (UniqueName: \"kubernetes.io/projected/484ba6c3-20dc-4b27-b7f5-901eef0643a7-kube-api-access-5c2wl\") pod \"neutron-df6cb98f-8s46w\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.717025 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.758386 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 30 11:13:11 crc kubenswrapper[4869]: I0130 11:13:11.804851 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.189553 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.245041 4869 generic.go:334] "Generic (PLEG): container finished" podID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerID="8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b" exitCode=0 Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.245446 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" event={"ID":"d9ea9c68-a482-490f-97cd-35545cea0e42","Type":"ContainerDied","Data":"8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b"} Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.245477 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" event={"ID":"d9ea9c68-a482-490f-97cd-35545cea0e42","Type":"ContainerDied","Data":"23240a5cbe7f86942d12b9db64820b949059b4cc21d50cc7fe0e2678eb186e32"} Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.245495 4869 scope.go:117] "RemoveContainer" containerID="8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.245634 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-2ngc6" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.255802 4869 generic.go:334] "Generic (PLEG): container finished" podID="36d9c803-e141-42ec-a169-66838e70db68" containerID="9c8a94bbfc90e47a8d2c00ce2fcbb49a27c39f7b15d444037af8085e4789dd3a" exitCode=0 Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.255884 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b94756495-f44c6" event={"ID":"36d9c803-e141-42ec-a169-66838e70db68","Type":"ContainerDied","Data":"9c8a94bbfc90e47a8d2c00ce2fcbb49a27c39f7b15d444037af8085e4789dd3a"} Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.256104 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="cinder-scheduler" containerID="cri-o://f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f" gracePeriod=30 Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.257811 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="probe" containerID="cri-o://b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc" gracePeriod=30 Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.289699 4869 scope.go:117] "RemoveContainer" containerID="e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.306959 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-svc\") pod \"d9ea9c68-a482-490f-97cd-35545cea0e42\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.307024 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-nb\") pod \"d9ea9c68-a482-490f-97cd-35545cea0e42\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.307125 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-config\") pod \"d9ea9c68-a482-490f-97cd-35545cea0e42\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.307299 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-swift-storage-0\") pod \"d9ea9c68-a482-490f-97cd-35545cea0e42\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.307325 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fhwl\" (UniqueName: \"kubernetes.io/projected/d9ea9c68-a482-490f-97cd-35545cea0e42-kube-api-access-7fhwl\") pod \"d9ea9c68-a482-490f-97cd-35545cea0e42\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.307365 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-sb\") pod \"d9ea9c68-a482-490f-97cd-35545cea0e42\" (UID: \"d9ea9c68-a482-490f-97cd-35545cea0e42\") " Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.313523 4869 scope.go:117] "RemoveContainer" containerID="8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b" Jan 30 11:13:12 crc kubenswrapper[4869]: E0130 11:13:12.315642 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b\": container with ID starting with 8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b not found: ID does not exist" containerID="8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.315736 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b"} err="failed to get container status \"8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b\": rpc error: code = NotFound desc = could not find container \"8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b\": container with ID starting with 8c823d27e22f4a9ac0ca832f9154506c43529d6c92299562f66dac8caad8714b not found: ID does not exist" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.315781 4869 scope.go:117] "RemoveContainer" containerID="e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.316003 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9ea9c68-a482-490f-97cd-35545cea0e42-kube-api-access-7fhwl" (OuterVolumeSpecName: "kube-api-access-7fhwl") pod "d9ea9c68-a482-490f-97cd-35545cea0e42" (UID: "d9ea9c68-a482-490f-97cd-35545cea0e42"). InnerVolumeSpecName "kube-api-access-7fhwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:12 crc kubenswrapper[4869]: E0130 11:13:12.319401 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c\": container with ID starting with e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c not found: ID does not exist" containerID="e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.319763 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c"} err="failed to get container status \"e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c\": rpc error: code = NotFound desc = could not find container \"e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c\": container with ID starting with e76b6d419f6127e43ab00abb9f4a5dd7d9288f95e4fb58822118ba2a20fe511c not found: ID does not exist" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.385557 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-config" (OuterVolumeSpecName: "config") pod "d9ea9c68-a482-490f-97cd-35545cea0e42" (UID: "d9ea9c68-a482-490f-97cd-35545cea0e42"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.398069 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d9ea9c68-a482-490f-97cd-35545cea0e42" (UID: "d9ea9c68-a482-490f-97cd-35545cea0e42"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.400591 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d9ea9c68-a482-490f-97cd-35545cea0e42" (UID: "d9ea9c68-a482-490f-97cd-35545cea0e42"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.410792 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.410827 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fhwl\" (UniqueName: \"kubernetes.io/projected/d9ea9c68-a482-490f-97cd-35545cea0e42-kube-api-access-7fhwl\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.410844 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.410854 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.414931 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d9ea9c68-a482-490f-97cd-35545cea0e42" (UID: "d9ea9c68-a482-490f-97cd-35545cea0e42"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.421190 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d9ea9c68-a482-490f-97cd-35545cea0e42" (UID: "d9ea9c68-a482-490f-97cd-35545cea0e42"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.478791 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-df6cb98f-8s46w"] Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.497658 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.514513 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.514561 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9ea9c68-a482-490f-97cd-35545cea0e42-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.591759 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6444c94f66-mzq6m" Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.599067 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-2ngc6"] Jan 30 11:13:12 crc kubenswrapper[4869]: I0130 11:13:12.612073 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-2ngc6"] Jan 30 11:13:13 crc kubenswrapper[4869]: I0130 11:13:13.287243 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df6cb98f-8s46w" event={"ID":"484ba6c3-20dc-4b27-b7f5-901eef0643a7","Type":"ContainerStarted","Data":"a5570db7baf6da4c91df79cef463e37a3e6477f10e7ccd7079f3bf311e981158"} Jan 30 11:13:13 crc kubenswrapper[4869]: I0130 11:13:13.288102 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df6cb98f-8s46w" event={"ID":"484ba6c3-20dc-4b27-b7f5-901eef0643a7","Type":"ContainerStarted","Data":"a381b048e1d6b21ec22cf7214a1b8e39fa926bed88e85c72ebe8ab5f7bfa2c5b"} Jan 30 11:13:13 crc kubenswrapper[4869]: I0130 11:13:13.288116 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df6cb98f-8s46w" event={"ID":"484ba6c3-20dc-4b27-b7f5-901eef0643a7","Type":"ContainerStarted","Data":"306ab68261b0c4da8e178e5d159061441e37413a08b256b72bbea8705ac02494"} Jan 30 11:13:13 crc kubenswrapper[4869]: I0130 11:13:13.289101 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:13 crc kubenswrapper[4869]: I0130 11:13:13.308269 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-df6cb98f-8s46w" podStartSLOduration=2.308250916 podStartE2EDuration="2.308250916s" podCreationTimestamp="2026-01-30 11:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:13.306519757 +0000 UTC m=+1143.856395823" watchObservedRunningTime="2026-01-30 11:13:13.308250916 +0000 UTC m=+1143.858126982" Jan 30 11:13:14 crc kubenswrapper[4869]: I0130 11:13:14.143394 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9ea9c68-a482-490f-97cd-35545cea0e42" path="/var/lib/kubelet/pods/d9ea9c68-a482-490f-97cd-35545cea0e42/volumes" Jan 30 11:13:14 crc kubenswrapper[4869]: I0130 11:13:14.295138 4869 generic.go:334] "Generic (PLEG): container finished" podID="9daa784c-29c0-4086-a318-a9c4b73a6244" 
containerID="b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc" exitCode=0 Jan 30 11:13:14 crc kubenswrapper[4869]: I0130 11:13:14.295210 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9daa784c-29c0-4086-a318-a9c4b73a6244","Type":"ContainerDied","Data":"b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc"} Jan 30 11:13:14 crc kubenswrapper[4869]: I0130 11:13:14.332413 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6b94756495-f44c6" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.154:9696/\": dial tcp 10.217.0.154:9696: connect: connection refused" Jan 30 11:13:15 crc kubenswrapper[4869]: I0130 11:13:15.162142 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.090425 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.345524 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-587f84cd84-zqhxn"] Jan 30 11:13:16 crc kubenswrapper[4869]: E0130 11:13:16.346029 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerName="dnsmasq-dns" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.346050 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerName="dnsmasq-dns" Jan 30 11:13:16 crc kubenswrapper[4869]: E0130 11:13:16.346066 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerName="init" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.346073 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerName="init" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.346245 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9ea9c68-a482-490f-97cd-35545cea0e42" containerName="dnsmasq-dns" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.348440 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.378435 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-587f84cd84-zqhxn"] Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.396812 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/323f79a2-48c7-4768-8707-23bc31755a50-logs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.396872 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-config-data\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.396897 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-scripts\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.396956 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-internal-tls-certs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.397012 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px658\" (UniqueName: \"kubernetes.io/projected/323f79a2-48c7-4768-8707-23bc31755a50-kube-api-access-px658\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.397068 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-combined-ca-bundle\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.397102 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-public-tls-certs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.498828 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/323f79a2-48c7-4768-8707-23bc31755a50-logs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.498884 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-config-data\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.498907 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-scripts\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.498958 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-internal-tls-certs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.499008 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px658\" (UniqueName: \"kubernetes.io/projected/323f79a2-48c7-4768-8707-23bc31755a50-kube-api-access-px658\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.499053 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-combined-ca-bundle\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.499078 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-public-tls-certs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.505928 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-scripts\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.506324 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/323f79a2-48c7-4768-8707-23bc31755a50-logs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.508316 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-public-tls-certs\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.509253 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-internal-tls-certs\") pod \"placement-587f84cd84-zqhxn\" (UID: 
\"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.509861 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-combined-ca-bundle\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.519995 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-config-data\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.523546 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px658\" (UniqueName: \"kubernetes.io/projected/323f79a2-48c7-4768-8707-23bc31755a50-kube-api-access-px658\") pod \"placement-587f84cd84-zqhxn\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:16 crc kubenswrapper[4869]: I0130 11:13:16.684814 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.082413 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.116522 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data\") pod \"9daa784c-29c0-4086-a318-a9c4b73a6244\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.116610 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data-custom\") pod \"9daa784c-29c0-4086-a318-a9c4b73a6244\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.116649 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t659s\" (UniqueName: \"kubernetes.io/projected/9daa784c-29c0-4086-a318-a9c4b73a6244-kube-api-access-t659s\") pod \"9daa784c-29c0-4086-a318-a9c4b73a6244\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.116936 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-combined-ca-bundle\") pod \"9daa784c-29c0-4086-a318-a9c4b73a6244\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.117007 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-scripts\") pod \"9daa784c-29c0-4086-a318-a9c4b73a6244\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.117046 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/9daa784c-29c0-4086-a318-a9c4b73a6244-etc-machine-id\") pod \"9daa784c-29c0-4086-a318-a9c4b73a6244\" (UID: \"9daa784c-29c0-4086-a318-a9c4b73a6244\") " Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.117682 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9daa784c-29c0-4086-a318-a9c4b73a6244-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9daa784c-29c0-4086-a318-a9c4b73a6244" (UID: "9daa784c-29c0-4086-a318-a9c4b73a6244"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.135966 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-scripts" (OuterVolumeSpecName: "scripts") pod "9daa784c-29c0-4086-a318-a9c4b73a6244" (UID: "9daa784c-29c0-4086-a318-a9c4b73a6244"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.154442 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9daa784c-29c0-4086-a318-a9c4b73a6244-kube-api-access-t659s" (OuterVolumeSpecName: "kube-api-access-t659s") pod "9daa784c-29c0-4086-a318-a9c4b73a6244" (UID: "9daa784c-29c0-4086-a318-a9c4b73a6244"). InnerVolumeSpecName "kube-api-access-t659s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.160921 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9daa784c-29c0-4086-a318-a9c4b73a6244" (UID: "9daa784c-29c0-4086-a318-a9c4b73a6244"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.228829 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.228862 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9daa784c-29c0-4086-a318-a9c4b73a6244-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.228890 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.228901 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t659s\" (UniqueName: \"kubernetes.io/projected/9daa784c-29c0-4086-a318-a9c4b73a6244-kube-api-access-t659s\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.247252 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9daa784c-29c0-4086-a318-a9c4b73a6244" (UID: "9daa784c-29c0-4086-a318-a9c4b73a6244"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.266885 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data" (OuterVolumeSpecName: "config-data") pod "9daa784c-29c0-4086-a318-a9c4b73a6244" (UID: "9daa784c-29c0-4086-a318-a9c4b73a6244"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.331956 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.331994 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daa784c-29c0-4086-a318-a9c4b73a6244-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.355704 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-587f84cd84-zqhxn"] Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.378742 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-587f84cd84-zqhxn" event={"ID":"323f79a2-48c7-4768-8707-23bc31755a50","Type":"ContainerStarted","Data":"9e132362e5111dcd2f186519d8e686be77ef125488330c262e029b9f99383b2c"} Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.380280 4869 generic.go:334] "Generic (PLEG): container finished" podID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerID="f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f" exitCode=0 Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.380309 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9daa784c-29c0-4086-a318-a9c4b73a6244","Type":"ContainerDied","Data":"f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f"} Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.380336 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9daa784c-29c0-4086-a318-a9c4b73a6244","Type":"ContainerDied","Data":"72b996ccf7537939fb0250606855c08a21d1eda8b6dffc1cbb839601c43c86aa"} Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.380354 4869 scope.go:117] "RemoveContainer" containerID="b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.380487 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.430522 4869 scope.go:117] "RemoveContainer" containerID="f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.449933 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.492374 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.512757 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:17 crc kubenswrapper[4869]: E0130 11:13:17.513693 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="probe" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.513721 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="probe" Jan 30 11:13:17 crc kubenswrapper[4869]: E0130 11:13:17.513748 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="cinder-scheduler" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.513758 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="cinder-scheduler" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.513976 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="cinder-scheduler" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.513989 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" containerName="probe" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.515203 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.520051 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.534452 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.549232 4869 scope.go:117] "RemoveContainer" containerID="b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc" Jan 30 11:13:17 crc kubenswrapper[4869]: E0130 11:13:17.552151 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc\": container with ID starting with b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc not found: ID does not exist" containerID="b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.552531 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc"} err="failed to get container status \"b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc\": rpc error: code = NotFound desc = could not find container \"b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc\": container with ID starting with b8416fd82af26e4b8e46969b9702c5c091e33ca5e7bb1dd31f84ee3b8ca552cc not found: ID does not exist" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.552609 4869 scope.go:117] "RemoveContainer" containerID="f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f" Jan 30 11:13:17 crc kubenswrapper[4869]: E0130 11:13:17.553458 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f\": container with ID starting with f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f not found: ID does not exist" containerID="f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.553526 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f"} err="failed to get container status \"f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f\": rpc error: code = NotFound desc = could not find container \"f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f\": container with ID starting with f873fa320ba8d23f9d9f8cd350401d79fa435047fbc7f4e862e74e274b55b00f not found: ID does not exist" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.638008 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.638087 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data-custom\") pod 
\"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.638158 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.638185 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-scripts\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.638301 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxwkn\" (UniqueName: \"kubernetes.io/projected/161960a2-9537-4f72-913b-54b23f2b4be7-kube-api-access-wxwkn\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.638374 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161960a2-9537-4f72-913b-54b23f2b4be7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.742191 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.742247 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-scripts\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.742337 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxwkn\" (UniqueName: \"kubernetes.io/projected/161960a2-9537-4f72-913b-54b23f2b4be7-kube-api-access-wxwkn\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.742367 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161960a2-9537-4f72-913b-54b23f2b4be7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.742401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0" Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.742432 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0"
Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.746343 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161960a2-9537-4f72-913b-54b23f2b4be7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0"
Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.748543 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0"
Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.752675 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-scripts\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0"
Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.755143 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0"
Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.761044 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0"
Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.767209 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxwkn\" (UniqueName: \"kubernetes.io/projected/161960a2-9537-4f72-913b-54b23f2b4be7-kube-api-access-wxwkn\") pod \"cinder-scheduler-0\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " pod="openstack/cinder-scheduler-0"
Jan 30 11:13:17 crc kubenswrapper[4869]: I0130 11:13:17.902110 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 30 11:13:18 crc kubenswrapper[4869]: I0130 11:13:18.152353 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9daa784c-29c0-4086-a318-a9c4b73a6244" path="/var/lib/kubelet/pods/9daa784c-29c0-4086-a318-a9c4b73a6244/volumes"
Jan 30 11:13:18 crc kubenswrapper[4869]: I0130 11:13:18.389922 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-587f84cd84-zqhxn" event={"ID":"323f79a2-48c7-4768-8707-23bc31755a50","Type":"ContainerStarted","Data":"d02ef15f5ba93cc3d7b5586f76cca1e5e8d3253af837813c4d8c7db13197b4d6"}
Jan 30 11:13:18 crc kubenswrapper[4869]: I0130 11:13:18.389974 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-587f84cd84-zqhxn" event={"ID":"323f79a2-48c7-4768-8707-23bc31755a50","Type":"ContainerStarted","Data":"740a2738961798bcc0a9b14e30b355bacd116adc8c93775ec46f0050fa91974f"}
Jan 30 11:13:18 crc kubenswrapper[4869]: I0130 11:13:18.390035 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-587f84cd84-zqhxn"
Jan 30 11:13:18 crc kubenswrapper[4869]: I0130 11:13:18.409921 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-587f84cd84-zqhxn" podStartSLOduration=2.409901893 podStartE2EDuration="2.409901893s" podCreationTimestamp="2026-01-30 11:13:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:18.409896203 +0000 UTC m=+1148.959772269" watchObservedRunningTime="2026-01-30 11:13:18.409901893 +0000 UTC m=+1148.959777959"
Jan 30 11:13:18 crc kubenswrapper[4869]: W0130 11:13:18.473693 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod161960a2_9537_4f72_913b_54b23f2b4be7.slice/crio-ab4e3e504b0ade65472a44b603f13be52f4879320a3ee1e562aad2f21f4df9bf WatchSource:0}: Error finding container ab4e3e504b0ade65472a44b603f13be52f4879320a3ee1e562aad2f21f4df9bf: Status 404 returned error can't find the container with id ab4e3e504b0ade65472a44b603f13be52f4879320a3ee1e562aad2f21f4df9bf
Jan 30 11:13:18 crc kubenswrapper[4869]: I0130 11:13:18.475115 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.190376 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.251977 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-9fb998c86-5qb5j"
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.332979 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6444c94f66-mzq6m"]
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.333253 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6444c94f66-mzq6m" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api-log" containerID="cri-o://e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a" gracePeriod=30
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.333744 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6444c94f66-mzq6m" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api" containerID="cri-o://d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362" gracePeriod=30
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.466805 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"161960a2-9537-4f72-913b-54b23f2b4be7","Type":"ContainerStarted","Data":"eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187"}
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.466859 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"161960a2-9537-4f72-913b-54b23f2b4be7","Type":"ContainerStarted","Data":"ab4e3e504b0ade65472a44b603f13be52f4879320a3ee1e562aad2f21f4df9bf"}
Jan 30 11:13:19 crc kubenswrapper[4869]: I0130 11:13:19.467416 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-587f84cd84-zqhxn"
Jan 30 11:13:20 crc kubenswrapper[4869]: I0130 11:13:20.477721 4869 generic.go:334] "Generic (PLEG): container finished" podID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerID="e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a" exitCode=143
Jan 30 11:13:20 crc kubenswrapper[4869]: I0130 11:13:20.477742 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6444c94f66-mzq6m" event={"ID":"d1567b59-87d2-4eea-925a-d1b9d8a27e24","Type":"ContainerDied","Data":"e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a"}
Jan 30 11:13:20 crc kubenswrapper[4869]: I0130 11:13:20.479472 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"161960a2-9537-4f72-913b-54b23f2b4be7","Type":"ContainerStarted","Data":"12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008"}
Jan 30 11:13:20 crc kubenswrapper[4869]: I0130 11:13:20.503495 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.5034783640000002 podStartE2EDuration="3.503478364s" podCreationTimestamp="2026-01-30 11:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:20.498934175 +0000 UTC m=+1151.048810241" watchObservedRunningTime="2026-01-30 11:13:20.503478364 +0000 UTC m=+1151.053354430"
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.491760 4869 generic.go:334] "Generic (PLEG): container finished" podID="36d9c803-e141-42ec-a169-66838e70db68" containerID="b5b80d71d4c52800890ddf23dc8a54d41d125816b930b86402cf8924ae35084c" exitCode=0
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.492852 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b94756495-f44c6" event={"ID":"36d9c803-e141-42ec-a169-66838e70db68","Type":"ContainerDied","Data":"b5b80d71d4c52800890ddf23dc8a54d41d125816b930b86402cf8924ae35084c"}
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.636577 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b94756495-f44c6"
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.735844 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-httpd-config\") pod \"36d9c803-e141-42ec-a169-66838e70db68\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") "
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.735937 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-combined-ca-bundle\") pod \"36d9c803-e141-42ec-a169-66838e70db68\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") "
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.735976 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-ovndb-tls-certs\") pod \"36d9c803-e141-42ec-a169-66838e70db68\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") "
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.736000 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98sdq\" (UniqueName: \"kubernetes.io/projected/36d9c803-e141-42ec-a169-66838e70db68-kube-api-access-98sdq\") pod \"36d9c803-e141-42ec-a169-66838e70db68\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") "
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.736085 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-public-tls-certs\") pod \"36d9c803-e141-42ec-a169-66838e70db68\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") "
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.736111 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-config\") pod \"36d9c803-e141-42ec-a169-66838e70db68\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") "
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.736334 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-internal-tls-certs\") pod \"36d9c803-e141-42ec-a169-66838e70db68\" (UID: \"36d9c803-e141-42ec-a169-66838e70db68\") "
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.759358 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "36d9c803-e141-42ec-a169-66838e70db68" (UID: "36d9c803-e141-42ec-a169-66838e70db68"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.762998 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d9c803-e141-42ec-a169-66838e70db68-kube-api-access-98sdq" (OuterVolumeSpecName: "kube-api-access-98sdq") pod "36d9c803-e141-42ec-a169-66838e70db68" (UID: "36d9c803-e141-42ec-a169-66838e70db68"). InnerVolumeSpecName "kube-api-access-98sdq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.828869 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-config" (OuterVolumeSpecName: "config") pod "36d9c803-e141-42ec-a169-66838e70db68" (UID: "36d9c803-e141-42ec-a169-66838e70db68"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.838178 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "36d9c803-e141-42ec-a169-66838e70db68" (UID: "36d9c803-e141-42ec-a169-66838e70db68"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.838735 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98sdq\" (UniqueName: \"kubernetes.io/projected/36d9c803-e141-42ec-a169-66838e70db68-kube-api-access-98sdq\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.838764 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.838773 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-config\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.838785 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-httpd-config\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.840960 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "36d9c803-e141-42ec-a169-66838e70db68" (UID: "36d9c803-e141-42ec-a169-66838e70db68"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.849293 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36d9c803-e141-42ec-a169-66838e70db68" (UID: "36d9c803-e141-42ec-a169-66838e70db68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.866936 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "36d9c803-e141-42ec-a169-66838e70db68" (UID: "36d9c803-e141-42ec-a169-66838e70db68"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.940231 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.940269 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:21 crc kubenswrapper[4869]: I0130 11:13:21.940283 4869 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36d9c803-e141-42ec-a169-66838e70db68-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.190637 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.505288 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b94756495-f44c6" event={"ID":"36d9c803-e141-42ec-a169-66838e70db68","Type":"ContainerDied","Data":"ff6a318f34b4f7709b167bb6e09140f856f6f70a0b1f1ea0f6fc8d5e94c32877"}
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.505378 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b94756495-f44c6"
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.505399 4869 scope.go:117] "RemoveContainer" containerID="9c8a94bbfc90e47a8d2c00ce2fcbb49a27c39f7b15d444037af8085e4789dd3a"
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.538865 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6b94756495-f44c6"]
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.546651 4869 scope.go:117] "RemoveContainer" containerID="b5b80d71d4c52800890ddf23dc8a54d41d125816b930b86402cf8924ae35084c"
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.551684 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6b94756495-f44c6"]
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.863334 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6444c94f66-mzq6m" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:44558->10.217.0.159:9311: read: connection reset by peer"
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.864367 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6444c94f66-mzq6m" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:44560->10.217.0.159:9311: read: connection reset by peer"
Jan 30 11:13:22 crc kubenswrapper[4869]: I0130 11:13:22.903403 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.019743 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6879fcbdc7-xgzr6"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.417344 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6444c94f66-mzq6m"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.490645 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfs9n\" (UniqueName: \"kubernetes.io/projected/d1567b59-87d2-4eea-925a-d1b9d8a27e24-kube-api-access-jfs9n\") pod \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") "
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.490864 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data\") pod \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") "
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.490907 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data-custom\") pod \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") "
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.490932 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1567b59-87d2-4eea-925a-d1b9d8a27e24-logs\") pod \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") "
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.490982 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-combined-ca-bundle\") pod \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\" (UID: \"d1567b59-87d2-4eea-925a-d1b9d8a27e24\") "
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.498697 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1567b59-87d2-4eea-925a-d1b9d8a27e24-logs" (OuterVolumeSpecName: "logs") pod "d1567b59-87d2-4eea-925a-d1b9d8a27e24" (UID: "d1567b59-87d2-4eea-925a-d1b9d8a27e24"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.501501 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1567b59-87d2-4eea-925a-d1b9d8a27e24-kube-api-access-jfs9n" (OuterVolumeSpecName: "kube-api-access-jfs9n") pod "d1567b59-87d2-4eea-925a-d1b9d8a27e24" (UID: "d1567b59-87d2-4eea-925a-d1b9d8a27e24"). InnerVolumeSpecName "kube-api-access-jfs9n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.512987 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d1567b59-87d2-4eea-925a-d1b9d8a27e24" (UID: "d1567b59-87d2-4eea-925a-d1b9d8a27e24"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.543215 4869 generic.go:334] "Generic (PLEG): container finished" podID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerID="d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362" exitCode=0
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.543293 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6444c94f66-mzq6m" event={"ID":"d1567b59-87d2-4eea-925a-d1b9d8a27e24","Type":"ContainerDied","Data":"d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362"}
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.543336 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6444c94f66-mzq6m" event={"ID":"d1567b59-87d2-4eea-925a-d1b9d8a27e24","Type":"ContainerDied","Data":"0cdd2a878674f28a4955f6ff1bb8d955b8cc1387febf79fc2dbe657dbd52626a"}
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.543364 4869 scope.go:117] "RemoveContainer" containerID="d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.543534 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6444c94f66-mzq6m"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.555315 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data" (OuterVolumeSpecName: "config-data") pod "d1567b59-87d2-4eea-925a-d1b9d8a27e24" (UID: "d1567b59-87d2-4eea-925a-d1b9d8a27e24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.574968 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1567b59-87d2-4eea-925a-d1b9d8a27e24" (UID: "d1567b59-87d2-4eea-925a-d1b9d8a27e24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.593009 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.593054 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1567b59-87d2-4eea-925a-d1b9d8a27e24-logs\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.593064 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.593074 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfs9n\" (UniqueName: \"kubernetes.io/projected/d1567b59-87d2-4eea-925a-d1b9d8a27e24-kube-api-access-jfs9n\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.593083 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1567b59-87d2-4eea-925a-d1b9d8a27e24-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.649504 4869 scope.go:117] "RemoveContainer" containerID="e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.676673 4869 scope.go:117] "RemoveContainer" containerID="d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362"
Jan 30 11:13:23 crc kubenswrapper[4869]: E0130 11:13:23.677271 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362\": container with ID starting with d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362 not found: ID does not exist" containerID="d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.677323 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362"} err="failed to get container status \"d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362\": rpc error: code = NotFound desc = could not find container \"d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362\": container with ID starting with d3bd540a97575f65cb911b5dc7295f39c75d0af8a03199fa2415e0626e39d362 not found: ID does not exist"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.677350 4869 scope.go:117] "RemoveContainer" containerID="e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a"
Jan 30 11:13:23 crc kubenswrapper[4869]: E0130 11:13:23.678085 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a\": container with ID starting with e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a not found: ID does not exist" containerID="e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.678154 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a"} err="failed to get container status \"e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a\": rpc error: code = NotFound desc = could not find container \"e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a\": container with ID starting with e68dbf428c48496c204441fa5ed5a633f8050a2e84a5392482a12bf48f69428a not found: ID does not exist"
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.881244 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6444c94f66-mzq6m"]
Jan 30 11:13:23 crc kubenswrapper[4869]: I0130 11:13:23.898295 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6444c94f66-mzq6m"]
Jan 30 11:13:24 crc kubenswrapper[4869]: I0130 11:13:24.143654 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36d9c803-e141-42ec-a169-66838e70db68" path="/var/lib/kubelet/pods/36d9c803-e141-42ec-a169-66838e70db68/volumes"
Jan 30 11:13:24 crc kubenswrapper[4869]: I0130 11:13:24.144522 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" path="/var/lib/kubelet/pods/d1567b59-87d2-4eea-925a-d1b9d8a27e24/volumes"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.650476 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Jan 30 11:13:26 crc kubenswrapper[4869]: E0130 11:13:26.651136 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-api"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651150 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-api"
Jan 30 11:13:26 crc kubenswrapper[4869]: E0130 11:13:26.651162 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651168 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api"
Jan 30 11:13:26 crc kubenswrapper[4869]: E0130 11:13:26.651188 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-httpd"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651194 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-httpd"
Jan 30 11:13:26 crc kubenswrapper[4869]: E0130 11:13:26.651217 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api-log"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651223 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api-log"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651381 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api-log"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651393 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-api"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651411 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d9c803-e141-42ec-a169-66838e70db68" containerName="neutron-httpd"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651421 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1567b59-87d2-4eea-925a-d1b9d8a27e24" containerName="barbican-api"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.651995 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.654899 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.654948 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.654949 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-jdpjh"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.665181 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.747275 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config-secret\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.747400 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.747535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.747734 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2nh6\" (UniqueName: \"kubernetes.io/projected/e7503066-4e9b-410e-b83e-04ec6c2dc05c-kube-api-access-m2nh6\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.849125 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2nh6\" (UniqueName: \"kubernetes.io/projected/e7503066-4e9b-410e-b83e-04ec6c2dc05c-kube-api-access-m2nh6\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.849250 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config-secret\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.849313 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.849347 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.850570 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.855642 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config-secret\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.855830 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.884338 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2nh6\" (UniqueName: \"kubernetes.io/projected/e7503066-4e9b-410e-b83e-04ec6c2dc05c-kube-api-access-m2nh6\") pod \"openstackclient\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " pod="openstack/openstackclient"
Jan 30 11:13:26 crc kubenswrapper[4869]: I0130 11:13:26.972820 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 30 11:13:27 crc kubenswrapper[4869]: I0130 11:13:27.470283 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 30 11:13:27 crc kubenswrapper[4869]: I0130 11:13:27.576592 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e7503066-4e9b-410e-b83e-04ec6c2dc05c","Type":"ContainerStarted","Data":"11e62c21f84264deb2f560ac8ea30579b4816391ade5e75da8a6c556591b1e72"}
Jan 30 11:13:28 crc kubenswrapper[4869]: I0130 11:13:28.151098 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Jan 30 11:13:30 crc kubenswrapper[4869]: I0130 11:13:30.883486 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-644f9f48bf-ccrr2"]
Jan 30 11:13:30 crc kubenswrapper[4869]: I0130 11:13:30.885829 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:30 crc kubenswrapper[4869]: I0130 11:13:30.892115 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Jan 30 11:13:30 crc kubenswrapper[4869]: I0130 11:13:30.894126 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Jan 30 11:13:30 crc kubenswrapper[4869]: I0130 11:13:30.894156 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Jan 30 11:13:30 crc kubenswrapper[4869]: I0130 11:13:30.915800 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-644f9f48bf-ccrr2"]
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.074392 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-combined-ca-bundle\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.074578 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-internal-tls-certs\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.075307 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-run-httpd\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.075408 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-config-data\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.075787 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqjkw\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-kube-api-access-nqjkw\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.075909 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-public-tls-certs\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.075949 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-log-httpd\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.075984 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-etc-swift\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.178654 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-combined-ca-bundle\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.178911 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-internal-tls-certs\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.179002 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-run-httpd\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.179065 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-config-data\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.179112 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqjkw\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-kube-api-access-nqjkw\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.179188 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-public-tls-certs\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.179232 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-log-httpd\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.179323 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-etc-swift\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.179542 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-run-httpd\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.180392 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-log-httpd\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.195976 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-public-tls-certs\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.195985 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-internal-tls-certs\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.196843 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-etc-swift\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.196892 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-combined-ca-bundle\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.197442 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-config-data\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.203524 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqjkw\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-kube-api-access-nqjkw\") pod \"swift-proxy-644f9f48bf-ccrr2\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") " pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.228413 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:13:31 crc kubenswrapper[4869]: I0130 11:13:31.821203 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-644f9f48bf-ccrr2"]
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.085490 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.085959 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="sg-core" containerID="cri-o://8325878f717fad44d46d05bf7a50da4416b33e2e79185d6fc54d42d9a8709030" gracePeriod=30
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.086169 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="proxy-httpd" containerID="cri-o://51983a9333b068ad5d372df323d63b6590131ffeece45df7f80a65e2622fa70d" gracePeriod=30
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.086267 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-notification-agent" containerID="cri-o://09832317b501fab85b36ef81db3ac822f4d1175b57d021d0bdbff1cd40358777" gracePeriod=30
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.085863 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-central-agent" containerID="cri-o://c4177dc801127480d4f05284eedb2be6ebfdb2908c611e359348f5b2bac15d52" gracePeriod=30
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.100674 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.163:3000/\": EOF"
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.631419 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerID="51983a9333b068ad5d372df323d63b6590131ffeece45df7f80a65e2622fa70d" exitCode=0
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.631454 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerID="8325878f717fad44d46d05bf7a50da4416b33e2e79185d6fc54d42d9a8709030" exitCode=2
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.631464 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerID="c4177dc801127480d4f05284eedb2be6ebfdb2908c611e359348f5b2bac15d52" exitCode=0
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.631489 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerDied","Data":"51983a9333b068ad5d372df323d63b6590131ffeece45df7f80a65e2622fa70d"}
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.631567 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerDied","Data":"8325878f717fad44d46d05bf7a50da4416b33e2e79185d6fc54d42d9a8709030"}
Jan 30 11:13:32 crc kubenswrapper[4869]: I0130 11:13:32.631585 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerDied","Data":"c4177dc801127480d4f05284eedb2be6ebfdb2908c611e359348f5b2bac15d52"}
Jan 30 11:13:35 crc kubenswrapper[4869]: I0130 11:13:35.516407 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.163:3000/\": dial tcp 10.217.0.163:3000: connect: connection refused"
Jan 30 11:13:35 crc kubenswrapper[4869]: I0130 11:13:35.668742 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerID="09832317b501fab85b36ef81db3ac822f4d1175b57d021d0bdbff1cd40358777" exitCode=0
Jan 30 11:13:35 crc kubenswrapper[4869]: I0130 11:13:35.668797 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerDied","Data":"09832317b501fab85b36ef81db3ac822f4d1175b57d021d0bdbff1cd40358777"}
Jan 30 11:13:37 crc kubenswrapper[4869]: W0130 11:13:37.320039 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode72fb5b4_6762_4a1b_aae6_f2cbf4b7f69f.slice/crio-7694188781fbf71f3d2d8ff8e3eaf626d758903a5959565bb1ec077f16e0ceae WatchSource:0}: Error finding container 7694188781fbf71f3d2d8ff8e3eaf626d758903a5959565bb1ec077f16e0ceae: Status 404 returned error can't find the container with id 7694188781fbf71f3d2d8ff8e3eaf626d758903a5959565bb1ec077f16e0ceae
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.639417 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.697917 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-644f9f48bf-ccrr2" event={"ID":"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f","Type":"ContainerStarted","Data":"e05f057e1226a4ecd5362991160142f5969097d16d333ab689e4639d3978f4f8"}
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.698247 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-644f9f48bf-ccrr2" event={"ID":"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f","Type":"ContainerStarted","Data":"7694188781fbf71f3d2d8ff8e3eaf626d758903a5959565bb1ec077f16e0ceae"}
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.704457 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c96d522-04a0-49df-a9a8-dc050f71c013","Type":"ContainerDied","Data":"a2755119c1c666678412543710423174a385aa2323b32ce68990ba2fe8b39c85"}
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.704608 4869 scope.go:117] "RemoveContainer" containerID="51983a9333b068ad5d372df323d63b6590131ffeece45df7f80a65e2622fa70d"
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.704668 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.723927 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.724360 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-log" containerID="cri-o://f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e" gracePeriod=30
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.724694 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-httpd" containerID="cri-o://ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277" gracePeriod=30
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.743903 4869 scope.go:117] "RemoveContainer" containerID="8325878f717fad44d46d05bf7a50da4416b33e2e79185d6fc54d42d9a8709030"
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.789618 4869 scope.go:117] "RemoveContainer" containerID="09832317b501fab85b36ef81db3ac822f4d1175b57d021d0bdbff1cd40358777"
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.813035 4869 scope.go:117] "RemoveContainer" containerID="c4177dc801127480d4f05284eedb2be6ebfdb2908c611e359348f5b2bac15d52"
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825079 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-combined-ca-bundle\") pod \"5c96d522-04a0-49df-a9a8-dc050f71c013\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") "
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825174 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-config-data\") pod \"5c96d522-04a0-49df-a9a8-dc050f71c013\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") "
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825273 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-run-httpd\") pod \"5c96d522-04a0-49df-a9a8-dc050f71c013\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") "
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825307 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-log-httpd\") pod \"5c96d522-04a0-49df-a9a8-dc050f71c013\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") "
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825328 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxs5m\" (UniqueName: \"kubernetes.io/projected/5c96d522-04a0-49df-a9a8-dc050f71c013-kube-api-access-nxs5m\") pod \"5c96d522-04a0-49df-a9a8-dc050f71c013\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") "
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825362 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-scripts\") pod \"5c96d522-04a0-49df-a9a8-dc050f71c013\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") "
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825438 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-sg-core-conf-yaml\") pod \"5c96d522-04a0-49df-a9a8-dc050f71c013\" (UID: \"5c96d522-04a0-49df-a9a8-dc050f71c013\") "
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.825945 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5c96d522-04a0-49df-a9a8-dc050f71c013" (UID: "5c96d522-04a0-49df-a9a8-dc050f71c013"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.826702 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5c96d522-04a0-49df-a9a8-dc050f71c013" (UID: "5c96d522-04a0-49df-a9a8-dc050f71c013"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.840996 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-scripts" (OuterVolumeSpecName: "scripts") pod "5c96d522-04a0-49df-a9a8-dc050f71c013" (UID: "5c96d522-04a0-49df-a9a8-dc050f71c013"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.841056 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c96d522-04a0-49df-a9a8-dc050f71c013-kube-api-access-nxs5m" (OuterVolumeSpecName: "kube-api-access-nxs5m") pod "5c96d522-04a0-49df-a9a8-dc050f71c013" (UID: "5c96d522-04a0-49df-a9a8-dc050f71c013"). InnerVolumeSpecName "kube-api-access-nxs5m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.869276 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5c96d522-04a0-49df-a9a8-dc050f71c013" (UID: "5c96d522-04a0-49df-a9a8-dc050f71c013"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.914865 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c96d522-04a0-49df-a9a8-dc050f71c013" (UID: "5c96d522-04a0-49df-a9a8-dc050f71c013"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.927760 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.927804 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c96d522-04a0-49df-a9a8-dc050f71c013-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.927819 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxs5m\" (UniqueName: \"kubernetes.io/projected/5c96d522-04a0-49df-a9a8-dc050f71c013-kube-api-access-nxs5m\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.927835 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.927847 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.927858 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:37 crc kubenswrapper[4869]: I0130 11:13:37.948214 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-config-data" (OuterVolumeSpecName: "config-data") pod "5c96d522-04a0-49df-a9a8-dc050f71c013" (UID: "5c96d522-04a0-49df-a9a8-dc050f71c013"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.030523 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c96d522-04a0-49df-a9a8-dc050f71c013-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.040842 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.051215 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.063735 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:13:38 crc kubenswrapper[4869]: E0130 11:13:38.064271 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="sg-core"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064304 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="sg-core"
Jan 30 11:13:38 crc kubenswrapper[4869]: E0130 11:13:38.064346 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-notification-agent"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064358 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-notification-agent"
Jan 30 11:13:38 crc kubenswrapper[4869]: E0130 11:13:38.064373 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="proxy-httpd"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064382 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="proxy-httpd"
Jan 30 11:13:38 crc kubenswrapper[4869]: E0130 11:13:38.064391 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-central-agent"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064398 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-central-agent"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064625 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-notification-agent"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064644 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="sg-core"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064667 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="proxy-httpd"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.064689 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" containerName="ceilometer-central-agent"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.066840 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.068945 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.069563 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.074877 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.147215 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c96d522-04a0-49df-a9a8-dc050f71c013" path="/var/lib/kubelet/pods/5c96d522-04a0-49df-a9a8-dc050f71c013/volumes"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.233757 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.234124 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc5jb\" (UniqueName: \"kubernetes.io/projected/c06f8263-5706-4344-ae5f-86c39b0ab980-kube-api-access-hc5jb\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.234160 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-log-httpd\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.234535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.234590 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-config-data\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.234761 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-run-httpd\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.235505 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-scripts\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0"
Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.337821 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.337935 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc5jb\" (UniqueName: \"kubernetes.io/projected/c06f8263-5706-4344-ae5f-86c39b0ab980-kube-api-access-hc5jb\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.337961 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-log-httpd\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.337995 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.338023 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-config-data\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.338043 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-run-httpd\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.338113 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-scripts\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.338498 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-log-httpd\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.338565 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-run-httpd\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.344322 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.344640 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.345335 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-config-data\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.345351 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-scripts\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.360230 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc5jb\" (UniqueName: \"kubernetes.io/projected/c06f8263-5706-4344-ae5f-86c39b0ab980-kube-api-access-hc5jb\") pod \"ceilometer-0\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.419172 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.723855 4869 generic.go:334] "Generic (PLEG): container finished" podID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerID="f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e" exitCode=143 Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.724327 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4","Type":"ContainerDied","Data":"f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e"} Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.726825 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-644f9f48bf-ccrr2" event={"ID":"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f","Type":"ContainerStarted","Data":"30e76731331681ee54fc2b12405950fb45c6d15c16f3d7f16a01c29ca55daa7e"} Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.727245 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-644f9f48bf-ccrr2" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.727301 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-644f9f48bf-ccrr2" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.729973 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e7503066-4e9b-410e-b83e-04ec6c2dc05c","Type":"ContainerStarted","Data":"c12e6d9a2497ce86f69e26eedbbe77a594f0dded08c3af2c1fa95dcc0378cc5e"} Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.790141 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-644f9f48bf-ccrr2" podStartSLOduration=8.790113932 podStartE2EDuration="8.790113932s" podCreationTimestamp="2026-01-30 11:13:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:38.756194389 +0000 UTC m=+1169.306070445" watchObservedRunningTime="2026-01-30 11:13:38.790113932 +0000 UTC m=+1169.339989998" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 
11:13:38.792062 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.867885426 podStartE2EDuration="12.792051137s" podCreationTimestamp="2026-01-30 11:13:26 +0000 UTC" firstStartedPulling="2026-01-30 11:13:27.49217588 +0000 UTC m=+1158.042051946" lastFinishedPulling="2026-01-30 11:13:37.416341591 +0000 UTC m=+1167.966217657" observedRunningTime="2026-01-30 11:13:38.782390393 +0000 UTC m=+1169.332266459" watchObservedRunningTime="2026-01-30 11:13:38.792051137 +0000 UTC m=+1169.341927203" Jan 30 11:13:38 crc kubenswrapper[4869]: I0130 11:13:38.876293 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:38 crc kubenswrapper[4869]: W0130 11:13:38.889357 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc06f8263_5706_4344_ae5f_86c39b0ab980.slice/crio-b7ce5967af1cdbc5a87693144e99ad8139ea45e70bc8660c872a10e70ff2d495 WatchSource:0}: Error finding container b7ce5967af1cdbc5a87693144e99ad8139ea45e70bc8660c872a10e70ff2d495: Status 404 returned error can't find the container with id b7ce5967af1cdbc5a87693144e99ad8139ea45e70bc8660c872a10e70ff2d495 Jan 30 11:13:39 crc kubenswrapper[4869]: I0130 11:13:39.743551 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerStarted","Data":"9d1babf918b38f92eb376562c9b24a4115d2cc4068b28e9e62137ff937dcb640"} Jan 30 11:13:39 crc kubenswrapper[4869]: I0130 11:13:39.744051 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerStarted","Data":"b7ce5967af1cdbc5a87693144e99ad8139ea45e70bc8660c872a10e70ff2d495"} Jan 30 11:13:40 crc kubenswrapper[4869]: I0130 11:13:40.052197 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:13:40 crc kubenswrapper[4869]: I0130 11:13:40.052456 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-log" containerID="cri-o://b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1" gracePeriod=30 Jan 30 11:13:40 crc kubenswrapper[4869]: I0130 11:13:40.052523 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-httpd" containerID="cri-o://54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf" gracePeriod=30 Jan 30 11:13:40 crc kubenswrapper[4869]: I0130 11:13:40.666963 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:40 crc kubenswrapper[4869]: I0130 11:13:40.767985 4869 generic.go:334] "Generic (PLEG): container finished" podID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerID="b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1" exitCode=143 Jan 30 11:13:40 crc kubenswrapper[4869]: I0130 11:13:40.768103 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b223a1eb-7739-43f7-ab0a-50504f2a902d","Type":"ContainerDied","Data":"b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1"} Jan 30 11:13:40 crc kubenswrapper[4869]: I0130 11:13:40.773373 4869 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerStarted","Data":"4f0599e29a9e768b926e60d1720c9416d780482143d58d1518a23fed6d6c1577"} Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.350924 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.500594 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.500665 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-logs\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.500756 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24q9g\" (UniqueName: \"kubernetes.io/projected/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-kube-api-access-24q9g\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.500805 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-public-tls-certs\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.500827 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.500941 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-httpd-run\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.500980 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-combined-ca-bundle\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.501002 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-config-data\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.501568 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.505424 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-logs" (OuterVolumeSpecName: "logs") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.673493 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts" (OuterVolumeSpecName: "scripts") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.674011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts\") pod \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\" (UID: \"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4\") " Jan 30 11:13:41 crc kubenswrapper[4869]: W0130 11:13:41.674686 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4/volumes/kubernetes.io~secret/scripts Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.674730 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts" (OuterVolumeSpecName: "scripts") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.674811 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.674829 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.674843 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.675186 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.676972 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-kube-api-access-24q9g" (OuterVolumeSpecName: "kube-api-access-24q9g") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "kube-api-access-24q9g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.677792 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.705481 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-config-data" (OuterVolumeSpecName: "config-data") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.710597 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" (UID: "6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.741530 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.776914 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.776948 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24q9g\" (UniqueName: \"kubernetes.io/projected/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-kube-api-access-24q9g\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.776963 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.776978 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.776990 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.815234 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c54c84574-hxb8h"] Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.815507 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6c54c84574-hxb8h" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-api" containerID="cri-o://f9e32cc6f22599826f2c7ba35fc0e547c20981a4190c6a91c483999252cd7e29" gracePeriod=30 Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.815988 4869 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/neutron-6c54c84574-hxb8h" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-httpd" containerID="cri-o://9de643e0adb20ada99ce0f5134b6c4728242a55ce93d1ec0e43ce5db03b38852" gracePeriod=30 Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.817623 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerStarted","Data":"abb9df5e4666c0b3734ef2a2d3b2dde13a486675d6cef73821683e29da5b6f76"} Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.826735 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.837233 4869 generic.go:334] "Generic (PLEG): container finished" podID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerID="ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277" exitCode=0 Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.837297 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4","Type":"ContainerDied","Data":"ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277"} Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.837333 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4","Type":"ContainerDied","Data":"76aceb35947985fdbc0a402e4cdf131fdddda547ece33bd0aa8bc7a6dac2934e"} Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.837355 4869 scope.go:117] "RemoveContainer" containerID="ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.837370 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.876509 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.878036 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.893130 4869 scope.go:117] "RemoveContainer" containerID="f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.894227 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.916641 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:13:41 crc kubenswrapper[4869]: E0130 11:13:41.917244 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-log" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.917265 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-log" Jan 30 11:13:41 crc kubenswrapper[4869]: E0130 11:13:41.917283 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-httpd" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.917291 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-httpd" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.917546 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-httpd" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.917671 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" containerName="glance-log" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.920410 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.924031 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.924226 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.934878 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.975108 4869 scope.go:117] "RemoveContainer" containerID="ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277" Jan 30 11:13:41 crc kubenswrapper[4869]: E0130 11:13:41.979309 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277\": container with ID starting with ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277 not found: ID does not exist" containerID="ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.979378 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277"} err="failed to get container status \"ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277\": rpc error: code = NotFound desc = could not find container \"ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277\": container with ID starting with ac3b0f15aa26a1812a34be06ccea93eba16d3091e2d29b3b3ec7f99355ee6277 not found: ID does not exist" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.979413 4869 scope.go:117] "RemoveContainer" containerID="f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e" Jan 30 11:13:41 crc kubenswrapper[4869]: E0130 11:13:41.980379 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e\": container with ID starting with f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e not found: ID does not exist" containerID="f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e" Jan 30 11:13:41 crc kubenswrapper[4869]: I0130 11:13:41.980492 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e"} err="failed to get container status \"f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e\": rpc error: code = NotFound desc = could not find container \"f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e\": container with ID starting with f9873d09f9ac0f070f0f8c68499512935ae1991883fd4cc8fd121f957ada799e not found: ID does not exist" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081565 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081770 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-logs\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081797 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081845 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssnv2\" (UniqueName: \"kubernetes.io/projected/2ae8a334-b758-420e-8aae-a3f6437f9816-kube-api-access-ssnv2\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081871 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-scripts\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081895 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081939 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-config-data\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.081969 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.142974 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4" path="/var/lib/kubelet/pods/6501acc1-6bb0-4ed6-8485-f2ca36b7a4f4/volumes" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183526 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-logs\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183579 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183625 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssnv2\" (UniqueName: \"kubernetes.io/projected/2ae8a334-b758-420e-8aae-a3f6437f9816-kube-api-access-ssnv2\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183648 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-scripts\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183675 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183701 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-config-data\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183760 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.183948 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.184271 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-logs\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.184579 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.188579 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.189339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.190058 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-config-data\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.190310 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-scripts\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.207449 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssnv2\" (UniqueName: \"kubernetes.io/projected/2ae8a334-b758-420e-8aae-a3f6437f9816-kube-api-access-ssnv2\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.234173 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.240973 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.849304 4869 generic.go:334] "Generic (PLEG): container finished" podID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerID="9de643e0adb20ada99ce0f5134b6c4728242a55ce93d1ec0e43ce5db03b38852" exitCode=0 Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.849389 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c54c84574-hxb8h" event={"ID":"a2820db2-7c1c-46d7-9baf-8bf031649668","Type":"ContainerDied","Data":"9de643e0adb20ada99ce0f5134b6c4728242a55ce93d1ec0e43ce5db03b38852"} Jan 30 11:13:42 crc kubenswrapper[4869]: I0130 11:13:42.875426 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:13:42 crc kubenswrapper[4869]: W0130 11:13:42.889112 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ae8a334_b758_420e_8aae_a3f6437f9816.slice/crio-d2cd7dce626a347e3be5349e2b82b8ec5b0a9fe93254bd37de31e53fb8fdb755 WatchSource:0}: Error finding container d2cd7dce626a347e3be5349e2b82b8ec5b0a9fe93254bd37de31e53fb8fdb755: Status 404 returned error can't find the container with id d2cd7dce626a347e3be5349e2b82b8ec5b0a9fe93254bd37de31e53fb8fdb755 Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.767484 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841334 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-combined-ca-bundle\") pod \"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841387 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4nlj\" (UniqueName: \"kubernetes.io/projected/b223a1eb-7739-43f7-ab0a-50504f2a902d-kube-api-access-d4nlj\") pod \"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841414 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-internal-tls-certs\") pod \"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841483 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841615 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-config-data\") pod \"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841648 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-logs\") pod 
\"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841699 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-scripts\") pod \"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.841795 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-httpd-run\") pod \"b223a1eb-7739-43f7-ab0a-50504f2a902d\" (UID: \"b223a1eb-7739-43f7-ab0a-50504f2a902d\") " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.843145 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.844085 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-logs" (OuterVolumeSpecName: "logs") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.850192 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.864118 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b223a1eb-7739-43f7-ab0a-50504f2a902d-kube-api-access-d4nlj" (OuterVolumeSpecName: "kube-api-access-d4nlj") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "kube-api-access-d4nlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.864174 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-scripts" (OuterVolumeSpecName: "scripts") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.890069 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ae8a334-b758-420e-8aae-a3f6437f9816","Type":"ContainerStarted","Data":"c8372e95ef80d324e58700717c686cf6517c90804f250faf3d73bbe912f0a6a3"} Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.890420 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ae8a334-b758-420e-8aae-a3f6437f9816","Type":"ContainerStarted","Data":"d2cd7dce626a347e3be5349e2b82b8ec5b0a9fe93254bd37de31e53fb8fdb755"} Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.907470 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.918594 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerStarted","Data":"b1f76eaabe3abb8ead38942f2cff271ee2c6df9117a2b04085ed53e746085389"} Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.918873 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-central-agent" containerID="cri-o://9d1babf918b38f92eb376562c9b24a4115d2cc4068b28e9e62137ff937dcb640" gracePeriod=30 Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.918972 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.919160 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.919320 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="proxy-httpd" containerID="cri-o://b1f76eaabe3abb8ead38942f2cff271ee2c6df9117a2b04085ed53e746085389" gracePeriod=30 Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.919344 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-notification-agent" containerID="cri-o://4f0599e29a9e768b926e60d1720c9416d780482143d58d1518a23fed6d6c1577" gracePeriod=30 Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.919463 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="sg-core" containerID="cri-o://abb9df5e4666c0b3734ef2a2d3b2dde13a486675d6cef73821683e29da5b6f76" gracePeriod=30 Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.928856 4869 generic.go:334] "Generic (PLEG): container finished" podID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerID="54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf" exitCode=0 Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.928894 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b223a1eb-7739-43f7-ab0a-50504f2a902d","Type":"ContainerDied","Data":"54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf"} Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.928938 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b223a1eb-7739-43f7-ab0a-50504f2a902d","Type":"ContainerDied","Data":"136f9b5fe0ed1ddec63c41b3f600195c16be05df896b8a64f102abe5c323e2f5"} Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.928957 4869 scope.go:117] "RemoveContainer" containerID="54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.929098 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.930896 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-config-data" (OuterVolumeSpecName: "config-data") pod "b223a1eb-7739-43f7-ab0a-50504f2a902d" (UID: "b223a1eb-7739-43f7-ab0a-50504f2a902d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944294 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944322 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944333 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944347 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b223a1eb-7739-43f7-ab0a-50504f2a902d-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944358 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944382 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4nlj\" (UniqueName: \"kubernetes.io/projected/b223a1eb-7739-43f7-ab0a-50504f2a902d-kube-api-access-d4nlj\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944395 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b223a1eb-7739-43f7-ab0a-50504f2a902d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.944428 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.948093 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.823382029 podStartE2EDuration="5.948072277s" podCreationTimestamp="2026-01-30 11:13:38 +0000 UTC" firstStartedPulling="2026-01-30 11:13:38.896225734 +0000 UTC m=+1169.446101800" lastFinishedPulling="2026-01-30 11:13:43.020915982 +0000 UTC m=+1173.570792048" observedRunningTime="2026-01-30 11:13:43.94043006 +0000 UTC m=+1174.490306126" watchObservedRunningTime="2026-01-30 11:13:43.948072277 +0000 UTC m=+1174.497948343" Jan 30 11:13:43 crc kubenswrapper[4869]: I0130 11:13:43.976513 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.009636 4869 scope.go:117] "RemoveContainer" containerID="b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.044466 4869 scope.go:117] "RemoveContainer" containerID="54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.045679 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:44 crc kubenswrapper[4869]: E0130 11:13:44.045763 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf\": container with ID starting with 54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf not found: ID does not exist" containerID="54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.045786 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf"} err="failed to get container status \"54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf\": rpc error: code = NotFound desc = could not find container \"54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf\": container with ID starting with 54d391c10230c336bc6af8fc68de449dccf496502b5d474108d4fc337af820cf not found: ID does not exist" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.045807 4869 scope.go:117] "RemoveContainer" containerID="b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1" Jan 30 11:13:44 crc kubenswrapper[4869]: E0130 11:13:44.046005 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1\": container with ID starting with b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1 not found: ID does not exist" containerID="b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.046030 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1"} err="failed to get container status \"b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1\": rpc error: code = NotFound desc = could not find container \"b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1\": container with ID starting with b4ee96706f1224907393f481cc9e0b745ed3bb29533ec42623194a5c477450a1 not found: ID does not exist" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.269727 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.279324 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.304784 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-46cwq"] Jan 30 11:13:44 crc kubenswrapper[4869]: E0130 11:13:44.305440 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-httpd" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.305459 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-httpd" Jan 30 11:13:44 crc kubenswrapper[4869]: E0130 11:13:44.305496 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-log" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.305503 4869 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-log" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.305813 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-log" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.305829 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" containerName="glance-httpd" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.306573 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.321676 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.324557 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-46cwq"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.324670 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.368250 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.368390 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.400762 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.465756 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.466066 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.466098 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.466154 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.466174 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.466213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.466231 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rg7z\" (UniqueName: \"kubernetes.io/projected/285e9203-3b0b-4a52-8464-1019a682fd9d-kube-api-access-2rg7z\") pod \"nova-api-db-create-46cwq\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.488053 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.488104 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9dzr\" (UniqueName: \"kubernetes.io/projected/ec34c29c-665f-465a-99d0-c342aca2cf14-kube-api-access-t9dzr\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.488144 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/285e9203-3b0b-4a52-8464-1019a682fd9d-operator-scripts\") pod \"nova-api-db-create-46cwq\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.508363 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-zsp2z"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.510591 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.530489 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-zsp2z"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.593122 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.593175 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.593207 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.593245 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.593267 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.593300 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.593319 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rg7z\" (UniqueName: \"kubernetes.io/projected/285e9203-3b0b-4a52-8464-1019a682fd9d-kube-api-access-2rg7z\") pod \"nova-api-db-create-46cwq\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.594134 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.594166 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9dzr\" (UniqueName: 
\"kubernetes.io/projected/ec34c29c-665f-465a-99d0-c342aca2cf14-kube-api-access-t9dzr\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.594221 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/285e9203-3b0b-4a52-8464-1019a682fd9d-operator-scripts\") pod \"nova-api-db-create-46cwq\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.597738 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/285e9203-3b0b-4a52-8464-1019a682fd9d-operator-scripts\") pod \"nova-api-db-create-46cwq\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.602452 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.602826 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.603753 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.603800 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.604825 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.617011 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.617937 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-sm7m6"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.619350 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.640353 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.643861 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rg7z\" (UniqueName: \"kubernetes.io/projected/285e9203-3b0b-4a52-8464-1019a682fd9d-kube-api-access-2rg7z\") pod \"nova-api-db-create-46cwq\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.677731 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9dzr\" (UniqueName: \"kubernetes.io/projected/ec34c29c-665f-465a-99d0-c342aca2cf14-kube-api-access-t9dzr\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.702803 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-sm7m6"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.724048 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/466decd8-bc59-452e-8c91-03d08f776138-operator-scripts\") pod \"nova-cell1-db-create-sm7m6\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.724668 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-operator-scripts\") pod \"nova-cell0-db-create-zsp2z\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.724727 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7fgl\" (UniqueName: \"kubernetes.io/projected/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-kube-api-access-m7fgl\") pod \"nova-cell0-db-create-zsp2z\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.724753 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx58w\" (UniqueName: \"kubernetes.io/projected/466decd8-bc59-452e-8c91-03d08f776138-kube-api-access-dx58w\") pod \"nova-cell1-db-create-sm7m6\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.726097 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.731570 4869 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-api-86a6-account-create-update-44pzz"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.733821 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.738887 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.745276 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.748789 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-86a6-account-create-update-44pzz"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.767585 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.829331 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-operator-scripts\") pod \"nova-cell0-db-create-zsp2z\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.829380 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7fgl\" (UniqueName: \"kubernetes.io/projected/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-kube-api-access-m7fgl\") pod \"nova-cell0-db-create-zsp2z\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.829401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx58w\" (UniqueName: \"kubernetes.io/projected/466decd8-bc59-452e-8c91-03d08f776138-kube-api-access-dx58w\") pod \"nova-cell1-db-create-sm7m6\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.829427 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/998d0af4-dd1e-48b0-9b87-c142eb5949f2-operator-scripts\") pod \"nova-api-86a6-account-create-update-44pzz\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.829454 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rqbz\" (UniqueName: \"kubernetes.io/projected/998d0af4-dd1e-48b0-9b87-c142eb5949f2-kube-api-access-4rqbz\") pod \"nova-api-86a6-account-create-update-44pzz\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.829546 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/466decd8-bc59-452e-8c91-03d08f776138-operator-scripts\") pod \"nova-cell1-db-create-sm7m6\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.830453 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/466decd8-bc59-452e-8c91-03d08f776138-operator-scripts\") pod \"nova-cell1-db-create-sm7m6\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.830884 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-operator-scripts\") pod \"nova-cell0-db-create-zsp2z\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.840202 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-8662-account-create-update-nq7mx"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.841655 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.844304 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.851696 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx58w\" (UniqueName: \"kubernetes.io/projected/466decd8-bc59-452e-8c91-03d08f776138-kube-api-access-dx58w\") pod \"nova-cell1-db-create-sm7m6\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.854404 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7fgl\" (UniqueName: \"kubernetes.io/projected/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-kube-api-access-m7fgl\") pod \"nova-cell0-db-create-zsp2z\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.876768 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8662-account-create-update-nq7mx"] Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.930708 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.932666 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/998d0af4-dd1e-48b0-9b87-c142eb5949f2-operator-scripts\") pod \"nova-api-86a6-account-create-update-44pzz\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.934299 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/998d0af4-dd1e-48b0-9b87-c142eb5949f2-operator-scripts\") pod \"nova-api-86a6-account-create-update-44pzz\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.943817 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f40e387-b913-4f1b-9055-cacff2507f9b-operator-scripts\") pod \"nova-cell0-8662-account-create-update-nq7mx\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.943936 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rqbz\" (UniqueName: \"kubernetes.io/projected/998d0af4-dd1e-48b0-9b87-c142eb5949f2-kube-api-access-4rqbz\") pod \"nova-api-86a6-account-create-update-44pzz\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.943985 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r54xn\" (UniqueName: \"kubernetes.io/projected/7f40e387-b913-4f1b-9055-cacff2507f9b-kube-api-access-r54xn\") pod \"nova-cell0-8662-account-create-update-nq7mx\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.966619 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rqbz\" (UniqueName: \"kubernetes.io/projected/998d0af4-dd1e-48b0-9b87-c142eb5949f2-kube-api-access-4rqbz\") pod \"nova-api-86a6-account-create-update-44pzz\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.970389 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.983180 4869 generic.go:334] "Generic (PLEG): container finished" podID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerID="b1f76eaabe3abb8ead38942f2cff271ee2c6df9117a2b04085ed53e746085389" exitCode=0 Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.983207 4869 generic.go:334] "Generic (PLEG): container finished" podID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerID="abb9df5e4666c0b3734ef2a2d3b2dde13a486675d6cef73821683e29da5b6f76" exitCode=2 Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.983218 4869 generic.go:334] "Generic (PLEG): container finished" podID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerID="4f0599e29a9e768b926e60d1720c9416d780482143d58d1518a23fed6d6c1577" exitCode=0 Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.983279 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerDied","Data":"b1f76eaabe3abb8ead38942f2cff271ee2c6df9117a2b04085ed53e746085389"} Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.983307 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerDied","Data":"abb9df5e4666c0b3734ef2a2d3b2dde13a486675d6cef73821683e29da5b6f76"} Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.983318 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerDied","Data":"4f0599e29a9e768b926e60d1720c9416d780482143d58d1518a23fed6d6c1577"} Jan 30 11:13:44 crc kubenswrapper[4869]: I0130 11:13:44.992683 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ae8a334-b758-420e-8aae-a3f6437f9816","Type":"ContainerStarted","Data":"75cb85d47c4f23763e64b6970bb9222234b6d481a8bbac78888a76d4dd1f8613"} Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.047107 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-c83f-account-create-update-hffbp"] Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.048545 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.051943 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f40e387-b913-4f1b-9055-cacff2507f9b-operator-scripts\") pod \"nova-cell0-8662-account-create-update-nq7mx\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.051998 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r54xn\" (UniqueName: \"kubernetes.io/projected/7f40e387-b913-4f1b-9055-cacff2507f9b-kube-api-access-r54xn\") pod \"nova-cell0-8662-account-create-update-nq7mx\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.054959 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f40e387-b913-4f1b-9055-cacff2507f9b-operator-scripts\") pod \"nova-cell0-8662-account-create-update-nq7mx\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.056472 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.066760 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.066738407 podStartE2EDuration="4.066738407s" podCreationTimestamp="2026-01-30 11:13:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:45.037073175 +0000 UTC m=+1175.586949241" watchObservedRunningTime="2026-01-30 11:13:45.066738407 +0000 UTC m=+1175.616614473" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.072935 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c83f-account-create-update-hffbp"] Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.077893 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r54xn\" (UniqueName: \"kubernetes.io/projected/7f40e387-b913-4f1b-9055-cacff2507f9b-kube-api-access-r54xn\") pod \"nova-cell0-8662-account-create-update-nq7mx\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.145960 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.154246 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-operator-scripts\") pod \"nova-cell1-c83f-account-create-update-hffbp\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.154284 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jgth\" (UniqueName: \"kubernetes.io/projected/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-kube-api-access-9jgth\") pod \"nova-cell1-c83f-account-create-update-hffbp\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.256551 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-operator-scripts\") pod \"nova-cell1-c83f-account-create-update-hffbp\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.256606 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jgth\" (UniqueName: \"kubernetes.io/projected/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-kube-api-access-9jgth\") pod \"nova-cell1-c83f-account-create-update-hffbp\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.257602 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-operator-scripts\") pod \"nova-cell1-c83f-account-create-update-hffbp\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.278758 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jgth\" (UniqueName: \"kubernetes.io/projected/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-kube-api-access-9jgth\") pod \"nova-cell1-c83f-account-create-update-hffbp\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.289991 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.340498 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-46cwq"] Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.392966 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.552943 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:13:45 crc kubenswrapper[4869]: W0130 11:13:45.566441 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec34c29c_665f_465a_99d0_c342aca2cf14.slice/crio-cf5240fbd9e31e278a86a8029cc43333cf57bd4501f7d0846360eeeec2d69c90 WatchSource:0}: Error finding container cf5240fbd9e31e278a86a8029cc43333cf57bd4501f7d0846360eeeec2d69c90: Status 404 returned error can't find the container with id cf5240fbd9e31e278a86a8029cc43333cf57bd4501f7d0846360eeeec2d69c90 Jan 30 11:13:45 crc kubenswrapper[4869]: I0130 11:13:45.793087 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-86a6-account-create-update-44pzz"] Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.018798 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec34c29c-665f-465a-99d0-c342aca2cf14","Type":"ContainerStarted","Data":"cf5240fbd9e31e278a86a8029cc43333cf57bd4501f7d0846360eeeec2d69c90"} Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.022295 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-86a6-account-create-update-44pzz" event={"ID":"998d0af4-dd1e-48b0-9b87-c142eb5949f2","Type":"ContainerStarted","Data":"90c5a88f7899106ff7dd723d56a0b551a3c7a9b358c5a489f1bd79ca1bff1252"} Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.033261 4869 generic.go:334] "Generic (PLEG): container finished" podID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerID="f9e32cc6f22599826f2c7ba35fc0e547c20981a4190c6a91c483999252cd7e29" exitCode=0 Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.033360 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c54c84574-hxb8h" event={"ID":"a2820db2-7c1c-46d7-9baf-8bf031649668","Type":"ContainerDied","Data":"f9e32cc6f22599826f2c7ba35fc0e547c20981a4190c6a91c483999252cd7e29"} Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.033392 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c54c84574-hxb8h" event={"ID":"a2820db2-7c1c-46d7-9baf-8bf031649668","Type":"ContainerDied","Data":"1d70180f54cd9cd737060df78c0fb3687e66b3d7d89cfd76caff5c9d291cf815"} Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.033405 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d70180f54cd9cd737060df78c0fb3687e66b3d7d89cfd76caff5c9d291cf815" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.041526 4869 generic.go:334] "Generic (PLEG): container finished" podID="285e9203-3b0b-4a52-8464-1019a682fd9d" containerID="ec3119367a64d160d3d0df8968c569ef2ac842f41ee741d524838930b96fba12" exitCode=0 Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.047973 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-46cwq" event={"ID":"285e9203-3b0b-4a52-8464-1019a682fd9d","Type":"ContainerDied","Data":"ec3119367a64d160d3d0df8968c569ef2ac842f41ee741d524838930b96fba12"} Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.048015 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-46cwq" 
event={"ID":"285e9203-3b0b-4a52-8464-1019a682fd9d","Type":"ContainerStarted","Data":"39fc65120531aac7b25c235746af39b9893997ba20c505db8c474cef7f4e52f5"} Jan 30 11:13:46 crc kubenswrapper[4869]: E0130 11:13:46.072648 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod285e9203_3b0b_4a52_8464_1019a682fd9d.slice/crio-ec3119367a64d160d3d0df8968c569ef2ac842f41ee741d524838930b96fba12.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod285e9203_3b0b_4a52_8464_1019a682fd9d.slice/crio-conmon-ec3119367a64d160d3d0df8968c569ef2ac842f41ee741d524838930b96fba12.scope\": RecentStats: unable to find data in memory cache]" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.085338 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.156859 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b223a1eb-7739-43f7-ab0a-50504f2a902d" path="/var/lib/kubelet/pods/b223a1eb-7739-43f7-ab0a-50504f2a902d/volumes" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.254369 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-644f9f48bf-ccrr2" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.287900 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-config\") pod \"a2820db2-7c1c-46d7-9baf-8bf031649668\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.288166 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-combined-ca-bundle\") pod \"a2820db2-7c1c-46d7-9baf-8bf031649668\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.288212 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-httpd-config\") pod \"a2820db2-7c1c-46d7-9baf-8bf031649668\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.288276 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-ovndb-tls-certs\") pod \"a2820db2-7c1c-46d7-9baf-8bf031649668\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.288393 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66kc4\" (UniqueName: \"kubernetes.io/projected/a2820db2-7c1c-46d7-9baf-8bf031649668-kube-api-access-66kc4\") pod \"a2820db2-7c1c-46d7-9baf-8bf031649668\" (UID: \"a2820db2-7c1c-46d7-9baf-8bf031649668\") " Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.306418 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a2820db2-7c1c-46d7-9baf-8bf031649668" (UID: "a2820db2-7c1c-46d7-9baf-8bf031649668"). 
InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.307572 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-zsp2z"] Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.307626 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2820db2-7c1c-46d7-9baf-8bf031649668-kube-api-access-66kc4" (OuterVolumeSpecName: "kube-api-access-66kc4") pod "a2820db2-7c1c-46d7-9baf-8bf031649668" (UID: "a2820db2-7c1c-46d7-9baf-8bf031649668"). InnerVolumeSpecName "kube-api-access-66kc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.307698 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-644f9f48bf-ccrr2" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.370793 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-sm7m6"] Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.387601 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8662-account-create-update-nq7mx"] Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.391488 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c83f-account-create-update-hffbp"] Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.396418 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.396777 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66kc4\" (UniqueName: \"kubernetes.io/projected/a2820db2-7c1c-46d7-9baf-8bf031649668-kube-api-access-66kc4\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.425898 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-config" (OuterVolumeSpecName: "config") pod "a2820db2-7c1c-46d7-9baf-8bf031649668" (UID: "a2820db2-7c1c-46d7-9baf-8bf031649668"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.503138 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.525381 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2820db2-7c1c-46d7-9baf-8bf031649668" (UID: "a2820db2-7c1c-46d7-9baf-8bf031649668"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.539204 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a2820db2-7c1c-46d7-9baf-8bf031649668" (UID: "a2820db2-7c1c-46d7-9baf-8bf031649668"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.604501 4869 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:46 crc kubenswrapper[4869]: I0130 11:13:46.604796 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2820db2-7c1c-46d7-9baf-8bf031649668-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.075258 4869 generic.go:334] "Generic (PLEG): container finished" podID="76a2d071-f2d2-418f-be6e-0488fa1dd3d8" containerID="6ff5e8125956cbc55ddecf51fea9c1dd9fdc0fb71f0b06392215227e5d522a69" exitCode=0 Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.075317 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zsp2z" event={"ID":"76a2d071-f2d2-418f-be6e-0488fa1dd3d8","Type":"ContainerDied","Data":"6ff5e8125956cbc55ddecf51fea9c1dd9fdc0fb71f0b06392215227e5d522a69"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.075347 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zsp2z" event={"ID":"76a2d071-f2d2-418f-be6e-0488fa1dd3d8","Type":"ContainerStarted","Data":"280725e571bf49690d0583c1a378c49ebce5c9a93130a43f54c74b977338d9fe"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.086432 4869 generic.go:334] "Generic (PLEG): container finished" podID="00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4" containerID="45865b93886b8857980054d339f929f43d0d91edc599cbbafd11a60132b05945" exitCode=0 Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.086565 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c83f-account-create-update-hffbp" event={"ID":"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4","Type":"ContainerDied","Data":"45865b93886b8857980054d339f929f43d0d91edc599cbbafd11a60132b05945"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.086600 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c83f-account-create-update-hffbp" event={"ID":"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4","Type":"ContainerStarted","Data":"a55c231295365363074b64d8b23b4754e7dbf9593bb90c0aa1231f33d1612f37"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.094554 4869 generic.go:334] "Generic (PLEG): container finished" podID="998d0af4-dd1e-48b0-9b87-c142eb5949f2" containerID="1abc3e2a19875a5d076b39d5251995e37da6b95e6a34f3616e85a54e3f5d1c08" exitCode=0 Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.094671 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-86a6-account-create-update-44pzz" event={"ID":"998d0af4-dd1e-48b0-9b87-c142eb5949f2","Type":"ContainerDied","Data":"1abc3e2a19875a5d076b39d5251995e37da6b95e6a34f3616e85a54e3f5d1c08"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.097823 4869 generic.go:334] "Generic (PLEG): container finished" podID="7f40e387-b913-4f1b-9055-cacff2507f9b" containerID="8ef795eee6cad526331d1ec40516e7e05de95f60e7c7819fd56bcb9dfd8c5e61" exitCode=0 Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.097953 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8662-account-create-update-nq7mx" event={"ID":"7f40e387-b913-4f1b-9055-cacff2507f9b","Type":"ContainerDied","Data":"8ef795eee6cad526331d1ec40516e7e05de95f60e7c7819fd56bcb9dfd8c5e61"} Jan 
30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.097988 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8662-account-create-update-nq7mx" event={"ID":"7f40e387-b913-4f1b-9055-cacff2507f9b","Type":"ContainerStarted","Data":"53f61a20e7f42d82a7c1067b934962a9b52bd1b33d32554763c71e04e0f20dce"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.110151 4869 generic.go:334] "Generic (PLEG): container finished" podID="466decd8-bc59-452e-8c91-03d08f776138" containerID="ace5b38b794c9cefda59f56dfa4f520cafde0d2f1d086b9bf339aaf0082fc0ff" exitCode=0 Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.110207 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-sm7m6" event={"ID":"466decd8-bc59-452e-8c91-03d08f776138","Type":"ContainerDied","Data":"ace5b38b794c9cefda59f56dfa4f520cafde0d2f1d086b9bf339aaf0082fc0ff"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.112228 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-sm7m6" event={"ID":"466decd8-bc59-452e-8c91-03d08f776138","Type":"ContainerStarted","Data":"be61490a9c45319180350fa65b4288b50757b5157ba2f8749005fbfddc2798fa"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.116573 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c54c84574-hxb8h" Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.116632 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec34c29c-665f-465a-99d0-c342aca2cf14","Type":"ContainerStarted","Data":"ecbb01cd1bd2c82e2943bf2f2101e86a6978d1af67f8df56a057053148d323ab"} Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.203894 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c54c84574-hxb8h"] Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.213017 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6c54c84574-hxb8h"] Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.515353 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.528899 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/285e9203-3b0b-4a52-8464-1019a682fd9d-operator-scripts\") pod \"285e9203-3b0b-4a52-8464-1019a682fd9d\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.529198 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rg7z\" (UniqueName: \"kubernetes.io/projected/285e9203-3b0b-4a52-8464-1019a682fd9d-kube-api-access-2rg7z\") pod \"285e9203-3b0b-4a52-8464-1019a682fd9d\" (UID: \"285e9203-3b0b-4a52-8464-1019a682fd9d\") " Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.530360 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/285e9203-3b0b-4a52-8464-1019a682fd9d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "285e9203-3b0b-4a52-8464-1019a682fd9d" (UID: "285e9203-3b0b-4a52-8464-1019a682fd9d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.576012 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/285e9203-3b0b-4a52-8464-1019a682fd9d-kube-api-access-2rg7z" (OuterVolumeSpecName: "kube-api-access-2rg7z") pod "285e9203-3b0b-4a52-8464-1019a682fd9d" (UID: "285e9203-3b0b-4a52-8464-1019a682fd9d"). InnerVolumeSpecName "kube-api-access-2rg7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.632102 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rg7z\" (UniqueName: \"kubernetes.io/projected/285e9203-3b0b-4a52-8464-1019a682fd9d-kube-api-access-2rg7z\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:47 crc kubenswrapper[4869]: I0130 11:13:47.632158 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/285e9203-3b0b-4a52-8464-1019a682fd9d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.021076 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.045470 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.102194 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7d5fb468b8-g8bf7"] Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.102829 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7d5fb468b8-g8bf7" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-log" containerID="cri-o://8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b" gracePeriod=30 Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.103328 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7d5fb468b8-g8bf7" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-api" containerID="cri-o://404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac" gracePeriod=30 Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.148776 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-46cwq" Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.152556 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" path="/var/lib/kubelet/pods/a2820db2-7c1c-46d7-9baf-8bf031649668/volumes" Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.153374 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-46cwq" event={"ID":"285e9203-3b0b-4a52-8464-1019a682fd9d","Type":"ContainerDied","Data":"39fc65120531aac7b25c235746af39b9893997ba20c505db8c474cef7f4e52f5"} Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.153406 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39fc65120531aac7b25c235746af39b9893997ba20c505db8c474cef7f4e52f5" Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.165093 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec34c29c-665f-465a-99d0-c342aca2cf14","Type":"ContainerStarted","Data":"6f58056d40518f7f08d5b89fddc6140fcb4975e4b4047401e6bcc1e12b2f6a6f"} Jan 30 11:13:48 crc kubenswrapper[4869]: I0130 11:13:48.191589 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.1915689369999996 podStartE2EDuration="4.191568937s" podCreationTimestamp="2026-01-30 11:13:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:13:48.187970335 +0000 UTC m=+1178.737846401" watchObservedRunningTime="2026-01-30 11:13:48.191568937 +0000 UTC m=+1178.741445013" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.103032 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.109466 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.115314 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.122967 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.129924 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.169979 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-operator-scripts\") pod \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170049 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jgth\" (UniqueName: \"kubernetes.io/projected/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-kube-api-access-9jgth\") pod \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\" (UID: \"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170075 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r54xn\" (UniqueName: \"kubernetes.io/projected/7f40e387-b913-4f1b-9055-cacff2507f9b-kube-api-access-r54xn\") pod \"7f40e387-b913-4f1b-9055-cacff2507f9b\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170097 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f40e387-b913-4f1b-9055-cacff2507f9b-operator-scripts\") pod \"7f40e387-b913-4f1b-9055-cacff2507f9b\" (UID: \"7f40e387-b913-4f1b-9055-cacff2507f9b\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170139 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rqbz\" (UniqueName: \"kubernetes.io/projected/998d0af4-dd1e-48b0-9b87-c142eb5949f2-kube-api-access-4rqbz\") pod \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170160 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7fgl\" (UniqueName: \"kubernetes.io/projected/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-kube-api-access-m7fgl\") pod \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170196 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/466decd8-bc59-452e-8c91-03d08f776138-operator-scripts\") pod \"466decd8-bc59-452e-8c91-03d08f776138\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170236 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-operator-scripts\") pod \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\" (UID: \"76a2d071-f2d2-418f-be6e-0488fa1dd3d8\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170275 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx58w\" (UniqueName: \"kubernetes.io/projected/466decd8-bc59-452e-8c91-03d08f776138-kube-api-access-dx58w\") pod \"466decd8-bc59-452e-8c91-03d08f776138\" (UID: \"466decd8-bc59-452e-8c91-03d08f776138\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.170315 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/998d0af4-dd1e-48b0-9b87-c142eb5949f2-operator-scripts\") pod \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\" (UID: \"998d0af4-dd1e-48b0-9b87-c142eb5949f2\") " Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.171898 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4" (UID: "00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.174572 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f40e387-b913-4f1b-9055-cacff2507f9b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7f40e387-b913-4f1b-9055-cacff2507f9b" (UID: "7f40e387-b913-4f1b-9055-cacff2507f9b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.174891 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/466decd8-bc59-452e-8c91-03d08f776138-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "466decd8-bc59-452e-8c91-03d08f776138" (UID: "466decd8-bc59-452e-8c91-03d08f776138"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.176093 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/998d0af4-dd1e-48b0-9b87-c142eb5949f2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "998d0af4-dd1e-48b0-9b87-c142eb5949f2" (UID: "998d0af4-dd1e-48b0-9b87-c142eb5949f2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.177487 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "76a2d071-f2d2-418f-be6e-0488fa1dd3d8" (UID: "76a2d071-f2d2-418f-be6e-0488fa1dd3d8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.181150 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-kube-api-access-m7fgl" (OuterVolumeSpecName: "kube-api-access-m7fgl") pod "76a2d071-f2d2-418f-be6e-0488fa1dd3d8" (UID: "76a2d071-f2d2-418f-be6e-0488fa1dd3d8"). InnerVolumeSpecName "kube-api-access-m7fgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.181203 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-kube-api-access-9jgth" (OuterVolumeSpecName: "kube-api-access-9jgth") pod "00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4" (UID: "00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4"). InnerVolumeSpecName "kube-api-access-9jgth". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.184307 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f40e387-b913-4f1b-9055-cacff2507f9b-kube-api-access-r54xn" (OuterVolumeSpecName: "kube-api-access-r54xn") pod "7f40e387-b913-4f1b-9055-cacff2507f9b" (UID: "7f40e387-b913-4f1b-9055-cacff2507f9b"). InnerVolumeSpecName "kube-api-access-r54xn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.192172 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/466decd8-bc59-452e-8c91-03d08f776138-kube-api-access-dx58w" (OuterVolumeSpecName: "kube-api-access-dx58w") pod "466decd8-bc59-452e-8c91-03d08f776138" (UID: "466decd8-bc59-452e-8c91-03d08f776138"). InnerVolumeSpecName "kube-api-access-dx58w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.200513 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/998d0af4-dd1e-48b0-9b87-c142eb5949f2-kube-api-access-4rqbz" (OuterVolumeSpecName: "kube-api-access-4rqbz") pod "998d0af4-dd1e-48b0-9b87-c142eb5949f2" (UID: "998d0af4-dd1e-48b0-9b87-c142eb5949f2"). InnerVolumeSpecName "kube-api-access-4rqbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.208070 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c83f-account-create-update-hffbp" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.208109 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c83f-account-create-update-hffbp" event={"ID":"00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4","Type":"ContainerDied","Data":"a55c231295365363074b64d8b23b4754e7dbf9593bb90c0aa1231f33d1612f37"} Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.208149 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a55c231295365363074b64d8b23b4754e7dbf9593bb90c0aa1231f33d1612f37" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.210192 4869 generic.go:334] "Generic (PLEG): container finished" podID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerID="8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b" exitCode=143 Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.210399 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d5fb468b8-g8bf7" event={"ID":"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20","Type":"ContainerDied","Data":"8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b"} Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.212332 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8662-account-create-update-nq7mx" event={"ID":"7f40e387-b913-4f1b-9055-cacff2507f9b","Type":"ContainerDied","Data":"53f61a20e7f42d82a7c1067b934962a9b52bd1b33d32554763c71e04e0f20dce"} Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.212369 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53f61a20e7f42d82a7c1067b934962a9b52bd1b33d32554763c71e04e0f20dce" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.212424 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-8662-account-create-update-nq7mx" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.217139 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-86a6-account-create-update-44pzz" event={"ID":"998d0af4-dd1e-48b0-9b87-c142eb5949f2","Type":"ContainerDied","Data":"90c5a88f7899106ff7dd723d56a0b551a3c7a9b358c5a489f1bd79ca1bff1252"} Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.217187 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90c5a88f7899106ff7dd723d56a0b551a3c7a9b358c5a489f1bd79ca1bff1252" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.217253 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-44pzz" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.233499 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-sm7m6" event={"ID":"466decd8-bc59-452e-8c91-03d08f776138","Type":"ContainerDied","Data":"be61490a9c45319180350fa65b4288b50757b5157ba2f8749005fbfddc2798fa"} Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.233556 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be61490a9c45319180350fa65b4288b50757b5157ba2f8749005fbfddc2798fa" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.233652 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-sm7m6" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.245179 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zsp2z" event={"ID":"76a2d071-f2d2-418f-be6e-0488fa1dd3d8","Type":"ContainerDied","Data":"280725e571bf49690d0583c1a378c49ebce5c9a93130a43f54c74b977338d9fe"} Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.245271 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="280725e571bf49690d0583c1a378c49ebce5c9a93130a43f54c74b977338d9fe" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.245206 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-zsp2z" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272120 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx58w\" (UniqueName: \"kubernetes.io/projected/466decd8-bc59-452e-8c91-03d08f776138-kube-api-access-dx58w\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272163 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/998d0af4-dd1e-48b0-9b87-c142eb5949f2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272180 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272194 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jgth\" (UniqueName: \"kubernetes.io/projected/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4-kube-api-access-9jgth\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272206 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r54xn\" (UniqueName: \"kubernetes.io/projected/7f40e387-b913-4f1b-9055-cacff2507f9b-kube-api-access-r54xn\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272218 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f40e387-b913-4f1b-9055-cacff2507f9b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272232 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rqbz\" (UniqueName: \"kubernetes.io/projected/998d0af4-dd1e-48b0-9b87-c142eb5949f2-kube-api-access-4rqbz\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272244 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7fgl\" (UniqueName: \"kubernetes.io/projected/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-kube-api-access-m7fgl\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272255 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/466decd8-bc59-452e-8c91-03d08f776138-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:49 crc kubenswrapper[4869]: I0130 11:13:49.272267 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76a2d071-f2d2-418f-be6e-0488fa1dd3d8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.271841 4869 generic.go:334] "Generic (PLEG): container finished" podID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerID="9d1babf918b38f92eb376562c9b24a4115d2cc4068b28e9e62137ff937dcb640" exitCode=0 Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.272006 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerDied","Data":"9d1babf918b38f92eb376562c9b24a4115d2cc4068b28e9e62137ff937dcb640"} Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.700931 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.708038 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.829811 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-config-data\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.829861 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.829885 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-log-httpd\") pod \"c06f8263-5706-4344-ae5f-86c39b0ab980\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.829913 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-scripts\") pod \"c06f8263-5706-4344-ae5f-86c39b0ab980\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.829942 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-internal-tls-certs\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.829978 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-combined-ca-bundle\") pod \"c06f8263-5706-4344-ae5f-86c39b0ab980\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830010 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-public-tls-certs\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830039 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-config-data\") pod \"c06f8263-5706-4344-ae5f-86c39b0ab980\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830058 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-sg-core-conf-yaml\") pod \"c06f8263-5706-4344-ae5f-86c39b0ab980\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830142 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-scripts\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830182 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc5jb\" (UniqueName: \"kubernetes.io/projected/c06f8263-5706-4344-ae5f-86c39b0ab980-kube-api-access-hc5jb\") pod \"c06f8263-5706-4344-ae5f-86c39b0ab980\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830205 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-run-httpd\") pod \"c06f8263-5706-4344-ae5f-86c39b0ab980\" (UID: \"c06f8263-5706-4344-ae5f-86c39b0ab980\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830223 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-logs\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.830239 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwqsf\" (UniqueName: \"kubernetes.io/projected/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-kube-api-access-wwqsf\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.832129 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c06f8263-5706-4344-ae5f-86c39b0ab980" (UID: "c06f8263-5706-4344-ae5f-86c39b0ab980"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.832774 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-logs" (OuterVolumeSpecName: "logs") pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.833048 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c06f8263-5706-4344-ae5f-86c39b0ab980" (UID: "c06f8263-5706-4344-ae5f-86c39b0ab980"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.836557 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-scripts" (OuterVolumeSpecName: "scripts") pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.837365 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-kube-api-access-wwqsf" (OuterVolumeSpecName: "kube-api-access-wwqsf") pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "kube-api-access-wwqsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.838465 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-scripts" (OuterVolumeSpecName: "scripts") pod "c06f8263-5706-4344-ae5f-86c39b0ab980" (UID: "c06f8263-5706-4344-ae5f-86c39b0ab980"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.841174 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c06f8263-5706-4344-ae5f-86c39b0ab980-kube-api-access-hc5jb" (OuterVolumeSpecName: "kube-api-access-hc5jb") pod "c06f8263-5706-4344-ae5f-86c39b0ab980" (UID: "c06f8263-5706-4344-ae5f-86c39b0ab980"). InnerVolumeSpecName "kube-api-access-hc5jb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.868184 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c06f8263-5706-4344-ae5f-86c39b0ab980" (UID: "c06f8263-5706-4344-ae5f-86c39b0ab980"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.888420 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-config-data" (OuterVolumeSpecName: "config-data") pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.913683 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c06f8263-5706-4344-ae5f-86c39b0ab980" (UID: "c06f8263-5706-4344-ae5f-86c39b0ab980"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.931819 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.932362 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle\") pod \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\" (UID: \"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20\") " Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933429 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933462 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933500 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc5jb\" (UniqueName: \"kubernetes.io/projected/c06f8263-5706-4344-ae5f-86c39b0ab980-kube-api-access-hc5jb\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933517 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933530 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933542 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwqsf\" (UniqueName: \"kubernetes.io/projected/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-kube-api-access-wwqsf\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933553 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933592 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c06f8263-5706-4344-ae5f-86c39b0ab980-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933604 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.933615 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:51 crc kubenswrapper[4869]: W0130 11:13:51.934030 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20/volumes/kubernetes.io~secret/combined-ca-bundle Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.934048 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") 
pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.961057 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.975651 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-config-data" (OuterVolumeSpecName: "config-data") pod "c06f8263-5706-4344-ae5f-86c39b0ab980" (UID: "c06f8263-5706-4344-ae5f-86c39b0ab980"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:51 crc kubenswrapper[4869]: I0130 11:13:51.987398 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" (UID: "ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.036107 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.036155 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.036177 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.036197 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06f8263-5706-4344-ae5f-86c39b0ab980-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.242886 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.242929 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.276092 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.288029 4869 generic.go:334] "Generic (PLEG): container finished" podID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerID="404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac" exitCode=0 Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.288091 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7d5fb468b8-g8bf7" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.288066 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d5fb468b8-g8bf7" event={"ID":"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20","Type":"ContainerDied","Data":"404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac"} Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.288226 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d5fb468b8-g8bf7" event={"ID":"ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20","Type":"ContainerDied","Data":"ea8c3373f3fcb84389eb5ec5fc4720bc39a770678d5df3852493c1ebb0d68bb1"} Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.288247 4869 scope.go:117] "RemoveContainer" containerID="404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.291515 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.295939 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.296415 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c06f8263-5706-4344-ae5f-86c39b0ab980","Type":"ContainerDied","Data":"b7ce5967af1cdbc5a87693144e99ad8139ea45e70bc8660c872a10e70ff2d495"} Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.296442 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.296561 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.352660 4869 scope.go:117] "RemoveContainer" containerID="8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.356866 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7d5fb468b8-g8bf7"] Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.365118 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-7d5fb468b8-g8bf7"] Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.376649 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.387781 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.404870 4869 scope.go:117] "RemoveContainer" containerID="404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.406011 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac\": container with ID starting with 404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac not found: ID does not exist" containerID="404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.406137 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac"} 
err="failed to get container status \"404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac\": rpc error: code = NotFound desc = could not find container \"404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac\": container with ID starting with 404eb63ee62b3affae7045d750b181184412873698b64ea44634150e7b7a1fac not found: ID does not exist" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.406371 4869 scope.go:117] "RemoveContainer" containerID="8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.408038 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b\": container with ID starting with 8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b not found: ID does not exist" containerID="8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.408094 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b"} err="failed to get container status \"8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b\": rpc error: code = NotFound desc = could not find container \"8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b\": container with ID starting with 8aeef19a30b1cb750ef8efe5876d1ee30611d8c32a2c27a0ced2835a322d663b not found: ID does not exist" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.408127 4869 scope.go:117] "RemoveContainer" containerID="b1f76eaabe3abb8ead38942f2cff271ee2c6df9117a2b04085ed53e746085389" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.430766 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431184 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-log" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431197 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-log" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431211 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76a2d071-f2d2-418f-be6e-0488fa1dd3d8" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431218 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="76a2d071-f2d2-418f-be6e-0488fa1dd3d8" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431240 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f40e387-b913-4f1b-9055-cacff2507f9b" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431246 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f40e387-b913-4f1b-9055-cacff2507f9b" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431321 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-api" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431332 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-api" Jan 30 
11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431343 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="998d0af4-dd1e-48b0-9b87-c142eb5949f2" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431349 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="998d0af4-dd1e-48b0-9b87-c142eb5949f2" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431361 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="sg-core" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431368 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="sg-core" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431383 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-httpd" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431390 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-httpd" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431406 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466decd8-bc59-452e-8c91-03d08f776138" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431412 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="466decd8-bc59-452e-8c91-03d08f776138" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431423 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431429 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431440 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-notification-agent" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431445 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-notification-agent" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431455 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="285e9203-3b0b-4a52-8464-1019a682fd9d" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431462 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="285e9203-3b0b-4a52-8464-1019a682fd9d" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431478 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-central-agent" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431486 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-central-agent" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431499 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="proxy-httpd" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431505 4869 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="proxy-httpd" Jan 30 11:13:52 crc kubenswrapper[4869]: E0130 11:13:52.431514 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-api" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431521 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-api" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431729 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="sg-core" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431742 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="466decd8-bc59-452e-8c91-03d08f776138" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431756 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="76a2d071-f2d2-418f-be6e-0488fa1dd3d8" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431767 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-central-agent" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431777 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-api" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431786 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="285e9203-3b0b-4a52-8464-1019a682fd9d" containerName="mariadb-database-create" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431795 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f40e387-b913-4f1b-9055-cacff2507f9b" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431805 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-api" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431815 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2820db2-7c1c-46d7-9baf-8bf031649668" containerName="neutron-httpd" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431825 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" containerName="placement-log" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431837 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="998d0af4-dd1e-48b0-9b87-c142eb5949f2" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431845 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="proxy-httpd" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431854 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" containerName="ceilometer-notification-agent" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.431862 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4" containerName="mariadb-account-create-update" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.433547 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.436020 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.440647 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.444851 4869 scope.go:117] "RemoveContainer" containerID="abb9df5e4666c0b3734ef2a2d3b2dde13a486675d6cef73821683e29da5b6f76" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.445907 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.478916 4869 scope.go:117] "RemoveContainer" containerID="4f0599e29a9e768b926e60d1720c9416d780482143d58d1518a23fed6d6c1577" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.521662 4869 scope.go:117] "RemoveContainer" containerID="9d1babf918b38f92eb376562c9b24a4115d2cc4068b28e9e62137ff937dcb640" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.544607 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-run-httpd\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.544679 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-config-data\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.544730 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-log-httpd\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.544771 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.544880 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.544942 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-scripts\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.544966 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g7wm\" (UniqueName: 
\"kubernetes.io/projected/a25b91e3-ec86-4a99-b9e1-396b6c431b15-kube-api-access-9g7wm\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646070 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646151 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646201 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-scripts\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646224 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g7wm\" (UniqueName: \"kubernetes.io/projected/a25b91e3-ec86-4a99-b9e1-396b6c431b15-kube-api-access-9g7wm\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646273 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-run-httpd\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646295 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-config-data\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646319 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-log-httpd\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.646778 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-log-httpd\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.647818 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-run-httpd\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.650950 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.651057 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-config-data\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.651980 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.660883 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-scripts\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.667022 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g7wm\" (UniqueName: \"kubernetes.io/projected/a25b91e3-ec86-4a99-b9e1-396b6c431b15-kube-api-access-9g7wm\") pod \"ceilometer-0\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " pod="openstack/ceilometer-0" Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.687556 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:52 crc kubenswrapper[4869]: I0130 11:13:52.689107 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:13:53 crc kubenswrapper[4869]: I0130 11:13:53.161540 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:13:53 crc kubenswrapper[4869]: W0130 11:13:53.168799 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda25b91e3_ec86_4a99_b9e1_396b6c431b15.slice/crio-0b07ddf70e6dd7347eaf91a3419bc936f12a618a85061c8e52f621104246cda0 WatchSource:0}: Error finding container 0b07ddf70e6dd7347eaf91a3419bc936f12a618a85061c8e52f621104246cda0: Status 404 returned error can't find the container with id 0b07ddf70e6dd7347eaf91a3419bc936f12a618a85061c8e52f621104246cda0 Jan 30 11:13:53 crc kubenswrapper[4869]: I0130 11:13:53.307058 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerStarted","Data":"0b07ddf70e6dd7347eaf91a3419bc936f12a618a85061c8e52f621104246cda0"} Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.143685 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c06f8263-5706-4344-ae5f-86c39b0ab980" path="/var/lib/kubelet/pods/c06f8263-5706-4344-ae5f-86c39b0ab980/volumes" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.145214 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20" path="/var/lib/kubelet/pods/ca11f9fd-8bd6-4e6c-b99b-187fd5b00d20/volumes" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.317328 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerStarted","Data":"e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76"} Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.317347 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.317973 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.524652 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.524749 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.770244 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.770340 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.810874 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:54 crc kubenswrapper[4869]: I0130 11:13:54.844647 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.070994 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tfgnj"] Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.073119 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.077218 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.078099 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.082882 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tfgnj"] Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.120976 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mwvb7" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.198083 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-scripts\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.198364 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.198639 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpksz\" (UniqueName: \"kubernetes.io/projected/78dc88a3-3860-4c7f-acaf-5e2568a8761d-kube-api-access-jpksz\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.198793 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.301002 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-scripts\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.301305 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.301490 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpksz\" (UniqueName: \"kubernetes.io/projected/78dc88a3-3860-4c7f-acaf-5e2568a8761d-kube-api-access-jpksz\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: 
\"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.301603 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.308489 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.311320 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.316369 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-scripts\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.320785 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpksz\" (UniqueName: \"kubernetes.io/projected/78dc88a3-3860-4c7f-acaf-5e2568a8761d-kube-api-access-jpksz\") pod \"nova-cell0-conductor-db-sync-tfgnj\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.327556 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.327892 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.452935 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:13:55 crc kubenswrapper[4869]: I0130 11:13:55.916106 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tfgnj"] Jan 30 11:13:56 crc kubenswrapper[4869]: I0130 11:13:56.369495 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" event={"ID":"78dc88a3-3860-4c7f-acaf-5e2568a8761d","Type":"ContainerStarted","Data":"6da5592480d95caca68a04f8a8b10cdf9fb4b942d7848fed546d69bc5a1fd38a"} Jan 30 11:13:56 crc kubenswrapper[4869]: I0130 11:13:56.385053 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerStarted","Data":"9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3"} Jan 30 11:13:57 crc kubenswrapper[4869]: I0130 11:13:57.400726 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 11:13:57 crc kubenswrapper[4869]: I0130 11:13:57.401001 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 11:13:57 crc kubenswrapper[4869]: I0130 11:13:57.400926 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerStarted","Data":"6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47"} Jan 30 11:13:57 crc kubenswrapper[4869]: I0130 11:13:57.740211 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:57 crc kubenswrapper[4869]: I0130 11:13:57.749511 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 11:13:58 crc kubenswrapper[4869]: I0130 11:13:58.434116 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="ceilometer-central-agent" containerID="cri-o://e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76" gracePeriod=30 Jan 30 11:13:58 crc kubenswrapper[4869]: I0130 11:13:58.434507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerStarted","Data":"22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b"} Jan 30 11:13:58 crc kubenswrapper[4869]: I0130 11:13:58.434556 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 11:13:58 crc kubenswrapper[4869]: I0130 11:13:58.434586 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="proxy-httpd" containerID="cri-o://22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b" gracePeriod=30 Jan 30 11:13:58 crc kubenswrapper[4869]: I0130 11:13:58.434673 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="sg-core" containerID="cri-o://6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47" gracePeriod=30 Jan 30 11:13:58 crc kubenswrapper[4869]: I0130 11:13:58.434746 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" 
containerName="ceilometer-notification-agent" containerID="cri-o://9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3" gracePeriod=30 Jan 30 11:13:58 crc kubenswrapper[4869]: I0130 11:13:58.467933 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.761377803 podStartE2EDuration="6.467908894s" podCreationTimestamp="2026-01-30 11:13:52 +0000 UTC" firstStartedPulling="2026-01-30 11:13:53.17114402 +0000 UTC m=+1183.721020086" lastFinishedPulling="2026-01-30 11:13:57.877675111 +0000 UTC m=+1188.427551177" observedRunningTime="2026-01-30 11:13:58.453115674 +0000 UTC m=+1189.002991760" watchObservedRunningTime="2026-01-30 11:13:58.467908894 +0000 UTC m=+1189.017784960" Jan 30 11:13:59 crc kubenswrapper[4869]: I0130 11:13:59.448016 4869 generic.go:334] "Generic (PLEG): container finished" podID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerID="22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b" exitCode=0 Jan 30 11:13:59 crc kubenswrapper[4869]: I0130 11:13:59.448305 4869 generic.go:334] "Generic (PLEG): container finished" podID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerID="6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47" exitCode=2 Jan 30 11:13:59 crc kubenswrapper[4869]: I0130 11:13:59.448318 4869 generic.go:334] "Generic (PLEG): container finished" podID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerID="9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3" exitCode=0 Jan 30 11:13:59 crc kubenswrapper[4869]: I0130 11:13:59.448091 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerDied","Data":"22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b"} Jan 30 11:13:59 crc kubenswrapper[4869]: I0130 11:13:59.448404 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerDied","Data":"6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47"} Jan 30 11:13:59 crc kubenswrapper[4869]: I0130 11:13:59.448416 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerDied","Data":"9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3"} Jan 30 11:14:05 crc kubenswrapper[4869]: I0130 11:14:05.531304 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" event={"ID":"78dc88a3-3860-4c7f-acaf-5e2568a8761d","Type":"ContainerStarted","Data":"15f42f64893266a96f35bcf2c3e762836f56e50d156b31125fffc23d316c68c5"} Jan 30 11:14:05 crc kubenswrapper[4869]: I0130 11:14:05.552460 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" podStartSLOduration=2.009821339 podStartE2EDuration="10.55243671s" podCreationTimestamp="2026-01-30 11:13:55 +0000 UTC" firstStartedPulling="2026-01-30 11:13:55.924115415 +0000 UTC m=+1186.473991481" lastFinishedPulling="2026-01-30 11:14:04.466730796 +0000 UTC m=+1195.016606852" observedRunningTime="2026-01-30 11:14:05.548129547 +0000 UTC m=+1196.098005613" watchObservedRunningTime="2026-01-30 11:14:05.55243671 +0000 UTC m=+1196.102312776" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.144876 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.187979 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-log-httpd\") pod \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.188700 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a25b91e3-ec86-4a99-b9e1-396b6c431b15" (UID: "a25b91e3-ec86-4a99-b9e1-396b6c431b15"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.289752 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9g7wm\" (UniqueName: \"kubernetes.io/projected/a25b91e3-ec86-4a99-b9e1-396b6c431b15-kube-api-access-9g7wm\") pod \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.289858 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-scripts\") pod \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.289924 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-config-data\") pod \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.289961 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-run-httpd\") pod \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.289999 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-sg-core-conf-yaml\") pod \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.290027 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-combined-ca-bundle\") pod \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\" (UID: \"a25b91e3-ec86-4a99-b9e1-396b6c431b15\") " Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.290284 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a25b91e3-ec86-4a99-b9e1-396b6c431b15" (UID: "a25b91e3-ec86-4a99-b9e1-396b6c431b15"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.290351 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.296227 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a25b91e3-ec86-4a99-b9e1-396b6c431b15-kube-api-access-9g7wm" (OuterVolumeSpecName: "kube-api-access-9g7wm") pod "a25b91e3-ec86-4a99-b9e1-396b6c431b15" (UID: "a25b91e3-ec86-4a99-b9e1-396b6c431b15"). InnerVolumeSpecName "kube-api-access-9g7wm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.296559 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-scripts" (OuterVolumeSpecName: "scripts") pod "a25b91e3-ec86-4a99-b9e1-396b6c431b15" (UID: "a25b91e3-ec86-4a99-b9e1-396b6c431b15"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.323116 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a25b91e3-ec86-4a99-b9e1-396b6c431b15" (UID: "a25b91e3-ec86-4a99-b9e1-396b6c431b15"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.391233 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25b91e3-ec86-4a99-b9e1-396b6c431b15-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.391266 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.391279 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9g7wm\" (UniqueName: \"kubernetes.io/projected/a25b91e3-ec86-4a99-b9e1-396b6c431b15-kube-api-access-9g7wm\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.391289 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.404825 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a25b91e3-ec86-4a99-b9e1-396b6c431b15" (UID: "a25b91e3-ec86-4a99-b9e1-396b6c431b15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.415742 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-config-data" (OuterVolumeSpecName: "config-data") pod "a25b91e3-ec86-4a99-b9e1-396b6c431b15" (UID: "a25b91e3-ec86-4a99-b9e1-396b6c431b15"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.492643 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.492679 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25b91e3-ec86-4a99-b9e1-396b6c431b15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.543720 4869 generic.go:334] "Generic (PLEG): container finished" podID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerID="e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76" exitCode=0 Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.545247 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.554081 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerDied","Data":"e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76"} Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.559816 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a25b91e3-ec86-4a99-b9e1-396b6c431b15","Type":"ContainerDied","Data":"0b07ddf70e6dd7347eaf91a3419bc936f12a618a85061c8e52f621104246cda0"} Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.559866 4869 scope.go:117] "RemoveContainer" containerID="22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.613372 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.621003 4869 scope.go:117] "RemoveContainer" containerID="6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.621929 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.633877 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.634254 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="ceilometer-notification-agent" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634275 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="ceilometer-notification-agent" Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.634295 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="ceilometer-central-agent" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634301 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="ceilometer-central-agent" Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.634318 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="proxy-httpd" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634327 4869 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="proxy-httpd" Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.634350 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="sg-core" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634358 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="sg-core" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634561 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="ceilometer-notification-agent" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634579 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="ceilometer-central-agent" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634594 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="sg-core" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.634617 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" containerName="proxy-httpd" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.636681 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.641369 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.641777 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.653430 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.657197 4869 scope.go:117] "RemoveContainer" containerID="9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.695699 4869 scope.go:117] "RemoveContainer" containerID="e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.740384 4869 scope.go:117] "RemoveContainer" containerID="22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b" Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.741108 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b\": container with ID starting with 22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b not found: ID does not exist" containerID="22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.741164 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b"} err="failed to get container status \"22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b\": rpc error: code = NotFound desc = could not find container \"22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b\": container with ID starting with 22feb76ecdffb93034551d040858ffcc94553b1741035fbfde4d02cf0f499b7b not found: ID does not exist" Jan 30 11:14:06 crc 
kubenswrapper[4869]: I0130 11:14:06.741192 4869 scope.go:117] "RemoveContainer" containerID="6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47" Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.741913 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47\": container with ID starting with 6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47 not found: ID does not exist" containerID="6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.741965 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47"} err="failed to get container status \"6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47\": rpc error: code = NotFound desc = could not find container \"6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47\": container with ID starting with 6367f81ca9a22080b0e19b2d98c7c8c16fd1baa9fcdc1bc1ce978b8db3719c47 not found: ID does not exist" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.741996 4869 scope.go:117] "RemoveContainer" containerID="9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3" Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.742545 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3\": container with ID starting with 9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3 not found: ID does not exist" containerID="9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.742590 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3"} err="failed to get container status \"9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3\": rpc error: code = NotFound desc = could not find container \"9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3\": container with ID starting with 9a7d5a09e953fdb828b4670e67797d1db305d8243dc43f69aa2e6361a758e2f3 not found: ID does not exist" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.742620 4869 scope.go:117] "RemoveContainer" containerID="e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76" Jan 30 11:14:06 crc kubenswrapper[4869]: E0130 11:14:06.742962 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76\": container with ID starting with e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76 not found: ID does not exist" containerID="e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.742998 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76"} err="failed to get container status \"e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76\": rpc error: code = NotFound desc = could not find container 
\"e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76\": container with ID starting with e12851d1321ac0912e19af01d12cc6ffaad583951b3cbd8fb916dc4718e6fc76 not found: ID does not exist" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.803624 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-config-data\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.803691 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m592z\" (UniqueName: \"kubernetes.io/projected/a96afda1-0656-4607-b671-65ace8da5d7b-kube-api-access-m592z\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.803798 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.803822 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-run-httpd\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.803851 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-scripts\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.803875 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.803985 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-log-httpd\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.905270 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-config-data\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.905587 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m592z\" (UniqueName: \"kubernetes.io/projected/a96afda1-0656-4607-b671-65ace8da5d7b-kube-api-access-m592z\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc 
kubenswrapper[4869]: I0130 11:14:06.905649 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.905674 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-run-httpd\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.905721 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-scripts\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.905745 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.905808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-log-httpd\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.907247 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-log-httpd\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.912430 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.912845 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-scripts\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.912953 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-run-httpd\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.922215 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-config-data\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.923341 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-m592z\" (UniqueName: \"kubernetes.io/projected/a96afda1-0656-4607-b671-65ace8da5d7b-kube-api-access-m592z\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.934500 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " pod="openstack/ceilometer-0" Jan 30 11:14:06 crc kubenswrapper[4869]: I0130 11:14:06.961806 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:14:07 crc kubenswrapper[4869]: I0130 11:14:07.401762 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:07 crc kubenswrapper[4869]: I0130 11:14:07.567096 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerStarted","Data":"ea6484236fce984114e62329c0dd223951465e62335a846210bcf924cf505152"} Jan 30 11:14:08 crc kubenswrapper[4869]: I0130 11:14:08.146134 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a25b91e3-ec86-4a99-b9e1-396b6c431b15" path="/var/lib/kubelet/pods/a25b91e3-ec86-4a99-b9e1-396b6c431b15/volumes" Jan 30 11:14:08 crc kubenswrapper[4869]: I0130 11:14:08.582569 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerStarted","Data":"3813d207347665549df9186414444bfd8577be70c90451254fc2a551674a5fb5"} Jan 30 11:14:09 crc kubenswrapper[4869]: I0130 11:14:09.597368 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerStarted","Data":"6fec61e9e5af194ad8d5c4caa520af3d1ae34a68f458cceeb429d1ac0596cd82"} Jan 30 11:14:10 crc kubenswrapper[4869]: I0130 11:14:10.615584 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerStarted","Data":"f6e1c7449026dcdea8b601bec90363c3293832f4ab8a7ef8b2ddf2504f46b659"} Jan 30 11:14:12 crc kubenswrapper[4869]: I0130 11:14:12.636467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerStarted","Data":"3dc5aeba2c8fa1c20ec7a1d82a90b0d78b32bc2141d3a2eea87b0b28d89aec83"} Jan 30 11:14:12 crc kubenswrapper[4869]: I0130 11:14:12.636839 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 11:14:18 crc kubenswrapper[4869]: I0130 11:14:18.709877 4869 generic.go:334] "Generic (PLEG): container finished" podID="78dc88a3-3860-4c7f-acaf-5e2568a8761d" containerID="15f42f64893266a96f35bcf2c3e762836f56e50d156b31125fffc23d316c68c5" exitCode=0 Jan 30 11:14:18 crc kubenswrapper[4869]: I0130 11:14:18.710352 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" event={"ID":"78dc88a3-3860-4c7f-acaf-5e2568a8761d","Type":"ContainerDied","Data":"15f42f64893266a96f35bcf2c3e762836f56e50d156b31125fffc23d316c68c5"} Jan 30 11:14:18 crc kubenswrapper[4869]: I0130 11:14:18.730613 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ceilometer-0" podStartSLOduration=8.706660698 podStartE2EDuration="12.730587617s" podCreationTimestamp="2026-01-30 11:14:06 +0000 UTC" firstStartedPulling="2026-01-30 11:14:07.41099266 +0000 UTC m=+1197.960868736" lastFinishedPulling="2026-01-30 11:14:11.434919589 +0000 UTC m=+1201.984795655" observedRunningTime="2026-01-30 11:14:12.660937596 +0000 UTC m=+1203.210813682" watchObservedRunningTime="2026-01-30 11:14:18.730587617 +0000 UTC m=+1209.280463683" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.060032 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.153988 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpksz\" (UniqueName: \"kubernetes.io/projected/78dc88a3-3860-4c7f-acaf-5e2568a8761d-kube-api-access-jpksz\") pod \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.154132 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-combined-ca-bundle\") pod \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.154256 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data\") pod \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.154299 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-scripts\") pod \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.164004 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78dc88a3-3860-4c7f-acaf-5e2568a8761d-kube-api-access-jpksz" (OuterVolumeSpecName: "kube-api-access-jpksz") pod "78dc88a3-3860-4c7f-acaf-5e2568a8761d" (UID: "78dc88a3-3860-4c7f-acaf-5e2568a8761d"). InnerVolumeSpecName "kube-api-access-jpksz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.181383 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-scripts" (OuterVolumeSpecName: "scripts") pod "78dc88a3-3860-4c7f-acaf-5e2568a8761d" (UID: "78dc88a3-3860-4c7f-acaf-5e2568a8761d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:20 crc kubenswrapper[4869]: E0130 11:14:20.193295 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data podName:78dc88a3-3860-4c7f-acaf-5e2568a8761d nodeName:}" failed. No retries permitted until 2026-01-30 11:14:20.693259351 +0000 UTC m=+1211.243135417 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data") pod "78dc88a3-3860-4c7f-acaf-5e2568a8761d" (UID: "78dc88a3-3860-4c7f-acaf-5e2568a8761d") : error deleting /var/lib/kubelet/pods/78dc88a3-3860-4c7f-acaf-5e2568a8761d/volume-subpaths: remove /var/lib/kubelet/pods/78dc88a3-3860-4c7f-acaf-5e2568a8761d/volume-subpaths: no such file or directory Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.197190 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78dc88a3-3860-4c7f-acaf-5e2568a8761d" (UID: "78dc88a3-3860-4c7f-acaf-5e2568a8761d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.256199 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.256240 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.256253 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpksz\" (UniqueName: \"kubernetes.io/projected/78dc88a3-3860-4c7f-acaf-5e2568a8761d-kube-api-access-jpksz\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.728535 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" event={"ID":"78dc88a3-3860-4c7f-acaf-5e2568a8761d","Type":"ContainerDied","Data":"6da5592480d95caca68a04f8a8b10cdf9fb4b942d7848fed546d69bc5a1fd38a"} Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.728850 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6da5592480d95caca68a04f8a8b10cdf9fb4b942d7848fed546d69bc5a1fd38a" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.728594 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tfgnj" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.764764 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data\") pod \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\" (UID: \"78dc88a3-3860-4c7f-acaf-5e2568a8761d\") " Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.769958 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data" (OuterVolumeSpecName: "config-data") pod "78dc88a3-3860-4c7f-acaf-5e2568a8761d" (UID: "78dc88a3-3860-4c7f-acaf-5e2568a8761d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.818482 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 11:14:20 crc kubenswrapper[4869]: E0130 11:14:20.818900 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dc88a3-3860-4c7f-acaf-5e2568a8761d" containerName="nova-cell0-conductor-db-sync" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.818921 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dc88a3-3860-4c7f-acaf-5e2568a8761d" containerName="nova-cell0-conductor-db-sync" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.819131 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="78dc88a3-3860-4c7f-acaf-5e2568a8761d" containerName="nova-cell0-conductor-db-sync" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.819811 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.839255 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.867129 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.867191 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbx67\" (UniqueName: \"kubernetes.io/projected/ba3b2f08-608c-49db-b58c-f20480a51bba-kube-api-access-tbx67\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.867220 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.867305 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78dc88a3-3860-4c7f-acaf-5e2568a8761d-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.968886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.968955 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbx67\" (UniqueName: \"kubernetes.io/projected/ba3b2f08-608c-49db-b58c-f20480a51bba-kube-api-access-tbx67\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.968991 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.972966 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.976395 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:20 crc kubenswrapper[4869]: I0130 11:14:20.985956 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbx67\" (UniqueName: \"kubernetes.io/projected/ba3b2f08-608c-49db-b58c-f20480a51bba-kube-api-access-tbx67\") pod \"nova-cell0-conductor-0\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") " pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:21 crc kubenswrapper[4869]: I0130 11:14:21.138927 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:21 crc kubenswrapper[4869]: I0130 11:14:21.606798 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 11:14:21 crc kubenswrapper[4869]: W0130 11:14:21.612926 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba3b2f08_608c_49db_b58c_f20480a51bba.slice/crio-e06db928b0be9812af36d023e4a2bb0bb10824f7c51148f1fd9d9b6abee6e978 WatchSource:0}: Error finding container e06db928b0be9812af36d023e4a2bb0bb10824f7c51148f1fd9d9b6abee6e978: Status 404 returned error can't find the container with id e06db928b0be9812af36d023e4a2bb0bb10824f7c51148f1fd9d9b6abee6e978 Jan 30 11:14:21 crc kubenswrapper[4869]: I0130 11:14:21.737127 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ba3b2f08-608c-49db-b58c-f20480a51bba","Type":"ContainerStarted","Data":"e06db928b0be9812af36d023e4a2bb0bb10824f7c51148f1fd9d9b6abee6e978"} Jan 30 11:14:21 crc kubenswrapper[4869]: I0130 11:14:21.769790 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:14:21 crc kubenswrapper[4869]: I0130 11:14:21.769860 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:14:22 crc kubenswrapper[4869]: I0130 11:14:22.745813 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" 
event={"ID":"ba3b2f08-608c-49db-b58c-f20480a51bba","Type":"ContainerStarted","Data":"8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b"} Jan 30 11:14:22 crc kubenswrapper[4869]: I0130 11:14:22.747067 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:22 crc kubenswrapper[4869]: I0130 11:14:22.769410 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.769390607 podStartE2EDuration="2.769390607s" podCreationTimestamp="2026-01-30 11:14:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:22.767543415 +0000 UTC m=+1213.317419481" watchObservedRunningTime="2026-01-30 11:14:22.769390607 +0000 UTC m=+1213.319266673" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.165595 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.622113 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-vwj7x"] Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.623568 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vwj7x" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.627103 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.627301 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.647784 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vwj7x"] Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.673819 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-scripts\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.673915 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-config-data\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.673955 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9px5f\" (UniqueName: \"kubernetes.io/projected/48c5632d-6e67-4014-9360-c8932146c432-kube-api-access-9px5f\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x" Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.674046 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x" 
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.775651 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-scripts\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.775751 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-config-data\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.775785 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9px5f\" (UniqueName: \"kubernetes.io/projected/48c5632d-6e67-4014-9360-c8932146c432-kube-api-access-9px5f\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.775854 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.784478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-scripts\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.785484 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.786291 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-config-data\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.807762 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9px5f\" (UniqueName: \"kubernetes.io/projected/48c5632d-6e67-4014-9360-c8932146c432-kube-api-access-9px5f\") pod \"nova-cell0-cell-mapping-vwj7x\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.829535 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.831919 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.833305 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.833932 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.845436 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.845761 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877022 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbbkm\" (UniqueName: \"kubernetes.io/projected/ccf24997-0c26-4cf8-87d3-52791cd4680c-kube-api-access-rbbkm\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877083 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877106 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccf24997-0c26-4cf8-87d3-52791cd4680c-logs\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877146 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xqd2\" (UniqueName: \"kubernetes.io/projected/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-kube-api-access-6xqd2\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877172 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-config-data\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877193 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-config-data\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877224 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.877250 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-logs\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.905470 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.948266 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vwj7x"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.958231 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978307 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbbkm\" (UniqueName: \"kubernetes.io/projected/ccf24997-0c26-4cf8-87d3-52791cd4680c-kube-api-access-rbbkm\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978350 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978367 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccf24997-0c26-4cf8-87d3-52791cd4680c-logs\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978394 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xqd2\" (UniqueName: \"kubernetes.io/projected/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-kube-api-access-6xqd2\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978412 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-config-data\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978428 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-config-data\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978453 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.978474 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-logs\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.980154 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccf24997-0c26-4cf8-87d3-52791cd4680c-logs\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:26 crc kubenswrapper[4869]: I0130 11:14:26.983019 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-logs\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.001403 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-config-data\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.007777 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-config-data\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.021873 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.025933 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.048442 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbbkm\" (UniqueName: \"kubernetes.io/projected/ccf24997-0c26-4cf8-87d3-52791cd4680c-kube-api-access-rbbkm\") pod \"nova-api-0\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " pod="openstack/nova-api-0"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.050516 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xqd2\" (UniqueName: \"kubernetes.io/projected/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-kube-api-access-6xqd2\") pod \"nova-metadata-0\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " pod="openstack/nova-metadata-0"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.075897 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-2jdxp"]
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.077729 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp"
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.100334 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.101869 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.106837 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.130009 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-2jdxp"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.164295 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.183568 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-config\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.183695 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.183767 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.185636 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-config-data\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.186578 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.186658 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-svc\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.186878 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk8jz\" (UniqueName: \"kubernetes.io/projected/5c574192-b2fc-42ad-980f-ca5d42f51ac7-kube-api-access-sk8jz\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.187671 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.187839 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhdzv\" (UniqueName: \"kubernetes.io/projected/388d68d4-4b57-458f-9d18-3989cee16c04-kube-api-access-bhdzv\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.218579 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.242451 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.274805 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.277011 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.281567 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289397 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289464 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhdzv\" (UniqueName: \"kubernetes.io/projected/388d68d4-4b57-458f-9d18-3989cee16c04-kube-api-access-bhdzv\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289505 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289524 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-config\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289565 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289596 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289617 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bldth\" (UniqueName: \"kubernetes.io/projected/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-kube-api-access-bldth\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289645 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-config-data\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289697 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289742 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-svc\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.289774 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk8jz\" (UniqueName: \"kubernetes.io/projected/5c574192-b2fc-42ad-980f-ca5d42f51ac7-kube-api-access-sk8jz\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.291041 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.301150 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-config\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.302576 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-svc\") pod 
\"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.302652 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.304069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-config-data\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.305106 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.306837 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk8jz\" (UniqueName: \"kubernetes.io/projected/5c574192-b2fc-42ad-980f-ca5d42f51ac7-kube-api-access-sk8jz\") pod \"dnsmasq-dns-bccf8f775-2jdxp\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.311366 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.312548 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhdzv\" (UniqueName: \"kubernetes.io/projected/388d68d4-4b57-458f-9d18-3989cee16c04-kube-api-access-bhdzv\") pod \"nova-scheduler-0\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.313977 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.397416 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.397614 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bldth\" (UniqueName: \"kubernetes.io/projected/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-kube-api-access-bldth\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.401000 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-config-data\") 
pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.401996 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.407544 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.426898 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bldth\" (UniqueName: \"kubernetes.io/projected/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-kube-api-access-bldth\") pod \"nova-cell1-novncproxy-0\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.455109 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.507808 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.613508 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vwj7x"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.696253 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.809798 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s8qnc"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.811578 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.815462 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.822594 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.829178 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vwj7x" event={"ID":"48c5632d-6e67-4014-9360-c8932146c432","Type":"ContainerStarted","Data":"f5c272d93448ba143eb3431f9f90a84d975d3aa0673e755959c906a30344e2e1"} Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.871400 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s8qnc"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.924975 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx8q6\" (UniqueName: \"kubernetes.io/projected/42a310a9-b061-4d7a-9644-5f1303fc5c15-kube-api-access-bx8q6\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.925060 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-config-data\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.925125 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-scripts\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.926399 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.933132 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:27 crc kubenswrapper[4869]: I0130 11:14:27.943044 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.028985 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.029629 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx8q6\" (UniqueName: 
\"kubernetes.io/projected/42a310a9-b061-4d7a-9644-5f1303fc5c15-kube-api-access-bx8q6\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.029670 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-config-data\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.029736 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-scripts\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.053850 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-scripts\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.054533 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.057768 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx8q6\" (UniqueName: \"kubernetes.io/projected/42a310a9-b061-4d7a-9644-5f1303fc5c15-kube-api-access-bx8q6\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.057837 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-config-data\") pod \"nova-cell1-conductor-db-sync-s8qnc\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.072775 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-2jdxp"] Jan 30 11:14:28 crc kubenswrapper[4869]: W0130 11:14:28.104166 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c574192_b2fc_42ad_980f_ca5d42f51ac7.slice/crio-dccec6ed2867a6c4ae2b0fd65764eb562f8d62dc3f17b068b29100504d861b20 WatchSource:0}: Error finding container dccec6ed2867a6c4ae2b0fd65764eb562f8d62dc3f17b068b29100504d861b20: Status 404 returned error can't find the container with id dccec6ed2867a6c4ae2b0fd65764eb562f8d62dc3f17b068b29100504d861b20 Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.253331 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.268374 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:28 crc kubenswrapper[4869]: W0130 11:14:28.283469 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod388d68d4_4b57_458f_9d18_3989cee16c04.slice/crio-e2afd93a8474edb4ac1795792a3d8a122f825fc5637d34bc98fa11a43795a1ef WatchSource:0}: Error finding container e2afd93a8474edb4ac1795792a3d8a122f825fc5637d34bc98fa11a43795a1ef: Status 404 returned error can't find the container with id e2afd93a8474edb4ac1795792a3d8a122f825fc5637d34bc98fa11a43795a1ef Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.377746 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.727153 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s8qnc"] Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.853808 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18","Type":"ContainerStarted","Data":"2e61d7139895c560297e90f35607f1588b9a2984dd509eb1384aea974307a074"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.856595 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ccf24997-0c26-4cf8-87d3-52791cd4680c","Type":"ContainerStarted","Data":"569cce4c001bb6a35c526a9a5d4a35b1606c4591ba9511c8b0116baec1ea45d6"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.858018 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"388d68d4-4b57-458f-9d18-3989cee16c04","Type":"ContainerStarted","Data":"e2afd93a8474edb4ac1795792a3d8a122f825fc5637d34bc98fa11a43795a1ef"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.858981 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"edd1fc1c-30e8-4e38-890a-0ee566ce2c26","Type":"ContainerStarted","Data":"7d2cf9285f13d1024ca89a8a8a6005bfa45a74937c7a2409fc699c0601f9c17a"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.860757 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" event={"ID":"42a310a9-b061-4d7a-9644-5f1303fc5c15","Type":"ContainerStarted","Data":"4e7bb0b2d6b96c8f1c84872ee4b069d9cc40f863801dacda19be1c19a91aaa75"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.863778 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerID="e46ec962f21b84f5634e321b99b8413f420ec0f1c647a411f5941754d794789c" exitCode=0 Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.863860 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" event={"ID":"5c574192-b2fc-42ad-980f-ca5d42f51ac7","Type":"ContainerDied","Data":"e46ec962f21b84f5634e321b99b8413f420ec0f1c647a411f5941754d794789c"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.863891 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" event={"ID":"5c574192-b2fc-42ad-980f-ca5d42f51ac7","Type":"ContainerStarted","Data":"dccec6ed2867a6c4ae2b0fd65764eb562f8d62dc3f17b068b29100504d861b20"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.865952 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vwj7x" 
event={"ID":"48c5632d-6e67-4014-9360-c8932146c432","Type":"ContainerStarted","Data":"e62e2bc94f8e1379e29cdd03d3712ba5ca06e49d50685909a4a864d05f0bc5c7"} Jan 30 11:14:28 crc kubenswrapper[4869]: I0130 11:14:28.903209 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-vwj7x" podStartSLOduration=2.903189669 podStartE2EDuration="2.903189669s" podCreationTimestamp="2026-01-30 11:14:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:28.899484574 +0000 UTC m=+1219.449360660" watchObservedRunningTime="2026-01-30 11:14:28.903189669 +0000 UTC m=+1219.453065735" Jan 30 11:14:29 crc kubenswrapper[4869]: I0130 11:14:29.878474 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" event={"ID":"42a310a9-b061-4d7a-9644-5f1303fc5c15","Type":"ContainerStarted","Data":"e54d60216b7cd4ff9bf216e525d3bff77639b9e99c69e0a57242e96c6750aee3"} Jan 30 11:14:29 crc kubenswrapper[4869]: I0130 11:14:29.892139 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" event={"ID":"5c574192-b2fc-42ad-980f-ca5d42f51ac7","Type":"ContainerStarted","Data":"e85400675ab49c958a87fc830f35df0dee7d933272c59fcdb8ba4741cb390fd8"} Jan 30 11:14:29 crc kubenswrapper[4869]: I0130 11:14:29.926205 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" podStartSLOduration=2.926174993 podStartE2EDuration="2.926174993s" podCreationTimestamp="2026-01-30 11:14:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:29.901879854 +0000 UTC m=+1220.451755920" watchObservedRunningTime="2026-01-30 11:14:29.926174993 +0000 UTC m=+1220.476051059" Jan 30 11:14:29 crc kubenswrapper[4869]: I0130 11:14:29.933864 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" podStartSLOduration=3.933846881 podStartE2EDuration="3.933846881s" podCreationTimestamp="2026-01-30 11:14:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:29.921916883 +0000 UTC m=+1220.471792949" watchObservedRunningTime="2026-01-30 11:14:29.933846881 +0000 UTC m=+1220.483722947" Jan 30 11:14:30 crc kubenswrapper[4869]: I0130 11:14:30.904915 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.174741 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.211338 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.931869 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"edd1fc1c-30e8-4e38-890a-0ee566ce2c26","Type":"ContainerStarted","Data":"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584"} Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.931919 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"edd1fc1c-30e8-4e38-890a-0ee566ce2c26","Type":"ContainerStarted","Data":"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7"} Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.932060 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-log" containerID="cri-o://4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7" gracePeriod=30 Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.932613 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-metadata" containerID="cri-o://43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584" gracePeriod=30 Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.938798 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18","Type":"ContainerStarted","Data":"ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5"} Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.938936 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5" gracePeriod=30 Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.945768 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ccf24997-0c26-4cf8-87d3-52791cd4680c","Type":"ContainerStarted","Data":"15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0"} Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.945817 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ccf24997-0c26-4cf8-87d3-52791cd4680c","Type":"ContainerStarted","Data":"0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a"} Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.958965 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"388d68d4-4b57-458f-9d18-3989cee16c04","Type":"ContainerStarted","Data":"58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc"} Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.969998 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.982175031 podStartE2EDuration="5.969972612s" podCreationTimestamp="2026-01-30 11:14:26 +0000 UTC" firstStartedPulling="2026-01-30 11:14:27.900595143 +0000 UTC m=+1218.450471209" lastFinishedPulling="2026-01-30 11:14:30.888392714 +0000 UTC m=+1221.438268790" observedRunningTime="2026-01-30 11:14:31.952482645 +0000 UTC m=+1222.502358721" watchObservedRunningTime="2026-01-30 11:14:31.969972612 +0000 UTC m=+1222.519848678" Jan 30 11:14:31 crc kubenswrapper[4869]: I0130 11:14:31.987437 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.033403476 podStartE2EDuration="5.987420667s" podCreationTimestamp="2026-01-30 11:14:26 +0000 UTC" firstStartedPulling="2026-01-30 11:14:27.929041021 +0000 UTC m=+1218.478917087" lastFinishedPulling="2026-01-30 11:14:30.883058212 +0000 UTC m=+1221.432934278" observedRunningTime="2026-01-30 11:14:31.979632146 
+0000 UTC m=+1222.529508222" watchObservedRunningTime="2026-01-30 11:14:31.987420667 +0000 UTC m=+1222.537296733" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.004169 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.515092007 podStartE2EDuration="5.004148142s" podCreationTimestamp="2026-01-30 11:14:27 +0000 UTC" firstStartedPulling="2026-01-30 11:14:28.403370963 +0000 UTC m=+1218.953247029" lastFinishedPulling="2026-01-30 11:14:30.892427098 +0000 UTC m=+1221.442303164" observedRunningTime="2026-01-30 11:14:31.999176361 +0000 UTC m=+1222.549052427" watchObservedRunningTime="2026-01-30 11:14:32.004148142 +0000 UTC m=+1222.554024208" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.222972 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.223037 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.508334 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.696853 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.852121 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.875415 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=4.280184853 podStartE2EDuration="6.87538395s" podCreationTimestamp="2026-01-30 11:14:26 +0000 UTC" firstStartedPulling="2026-01-30 11:14:28.289861792 +0000 UTC m=+1218.839737858" lastFinishedPulling="2026-01-30 11:14:30.885060889 +0000 UTC m=+1221.434936955" observedRunningTime="2026-01-30 11:14:32.032028643 +0000 UTC m=+1222.581904709" watchObservedRunningTime="2026-01-30 11:14:32.87538395 +0000 UTC m=+1223.425260016" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.971555 4869 generic.go:334] "Generic (PLEG): container finished" podID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerID="43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584" exitCode=0 Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.971592 4869 generic.go:334] "Generic (PLEG): container finished" podID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerID="4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7" exitCode=143 Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.971604 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"edd1fc1c-30e8-4e38-890a-0ee566ce2c26","Type":"ContainerDied","Data":"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584"} Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.971590 4869 util.go:48] "No ready sandbox for pod can be found. 
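The teardown of nova-metadata-0 and nova-cell1-novncproxy-0 above shows the graceful-kill contract: "Killing container with a grace period" with gracePeriod=30 means the runtime delivers SIGTERM and allows up to 30 seconds before escalating to SIGKILL. The exit codes in the "container finished" entries follow the shell convention: 0 is a clean exit (nova-metadata-metadata, 43800f5b…), while 143 = 128 + 15 is death by SIGTERM (nova-metadata-log, 4626905…). A tiny decoder:

    # Sketch: decode shell-style exit codes from "container finished" entries.
    import signal

    def describe_exit(code):
        if code == 0:
            return "clean exit"
        if code > 128:                      # 128 + N => terminated by signal N
            return f"killed by {signal.Signals(code - 128).name}"
        return f"error exit {code}"

    print(describe_exit(143))  # killed by SIGTERM
    print(describe_exit(0))    # clean exit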
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.971643 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"edd1fc1c-30e8-4e38-890a-0ee566ce2c26","Type":"ContainerDied","Data":"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7"} Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.971657 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"edd1fc1c-30e8-4e38-890a-0ee566ce2c26","Type":"ContainerDied","Data":"7d2cf9285f13d1024ca89a8a8a6005bfa45a74937c7a2409fc699c0601f9c17a"} Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.971680 4869 scope.go:117] "RemoveContainer" containerID="43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584" Jan 30 11:14:32 crc kubenswrapper[4869]: I0130 11:14:32.999842 4869 scope.go:117] "RemoveContainer" containerID="4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.018922 4869 scope.go:117] "RemoveContainer" containerID="43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584" Jan 30 11:14:33 crc kubenswrapper[4869]: E0130 11:14:33.019498 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584\": container with ID starting with 43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584 not found: ID does not exist" containerID="43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.019555 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584"} err="failed to get container status \"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584\": rpc error: code = NotFound desc = could not find container \"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584\": container with ID starting with 43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584 not found: ID does not exist" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.019587 4869 scope.go:117] "RemoveContainer" containerID="4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7" Jan 30 11:14:33 crc kubenswrapper[4869]: E0130 11:14:33.019946 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7\": container with ID starting with 4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7 not found: ID does not exist" containerID="4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.019977 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7"} err="failed to get container status \"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7\": rpc error: code = NotFound desc = could not find container \"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7\": container with ID starting with 4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7 not found: ID does not exist" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.020005 4869 
scope.go:117] "RemoveContainer" containerID="43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.020248 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584"} err="failed to get container status \"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584\": rpc error: code = NotFound desc = could not find container \"43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584\": container with ID starting with 43800f5b8fd42a51379160cf8be9f1b110845f17a58b4c52202d9d148ddfe584 not found: ID does not exist" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.020275 4869 scope.go:117] "RemoveContainer" containerID="4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.020501 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7"} err="failed to get container status \"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7\": rpc error: code = NotFound desc = could not find container \"4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7\": container with ID starting with 4626905124e99f10e2c5f28edc59acfbf95799850d07f7c842f5831dedfbadd7 not found: ID does not exist" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.038572 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-logs\") pod \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.039050 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-logs" (OuterVolumeSpecName: "logs") pod "edd1fc1c-30e8-4e38-890a-0ee566ce2c26" (UID: "edd1fc1c-30e8-4e38-890a-0ee566ce2c26"). InnerVolumeSpecName "logs". 
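The E-level "ContainerStatus from runtime service failed … NotFound" entries above look alarming but appear to be the cleanup path converging: RemoveContainer runs repeatedly for containers whose ContainerDied events were already processed and whose records CRI-O has already pruned, so the NotFound is effectively an idempotent delete. A triage sketch that separates those benign errors from ones with no matching ContainerDied:

    # Sketch: a NotFound on RemoveContainer is benign when the same container ID
    # already produced a ContainerDied event earlier in the log.
    import re

    def classify_remove_errors(lines):
        died, benign, suspicious = set(), [], []
        for line in lines:
            m = re.search(r'"ContainerDied","Data":"([0-9a-f]+)"', line)
            if m:
                died.add(m.group(1))
            if "could not find container" in line:
                m = re.search(r'containerID="([0-9a-f]+)"', line)
                if m:
                    (benign if m.group(1) in died else suspicious).append(m.group(1))
        return benign, suspicious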
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.039189 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-config-data\") pod \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.039261 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-combined-ca-bundle\") pod \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.039330 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xqd2\" (UniqueName: \"kubernetes.io/projected/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-kube-api-access-6xqd2\") pod \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\" (UID: \"edd1fc1c-30e8-4e38-890a-0ee566ce2c26\") " Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.039926 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.049703 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-kube-api-access-6xqd2" (OuterVolumeSpecName: "kube-api-access-6xqd2") pod "edd1fc1c-30e8-4e38-890a-0ee566ce2c26" (UID: "edd1fc1c-30e8-4e38-890a-0ee566ce2c26"). InnerVolumeSpecName "kube-api-access-6xqd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.071385 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-config-data" (OuterVolumeSpecName: "config-data") pod "edd1fc1c-30e8-4e38-890a-0ee566ce2c26" (UID: "edd1fc1c-30e8-4e38-890a-0ee566ce2c26"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.088964 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "edd1fc1c-30e8-4e38-890a-0ee566ce2c26" (UID: "edd1fc1c-30e8-4e38-890a-0ee566ce2c26"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.142318 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.142957 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.142972 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xqd2\" (UniqueName: \"kubernetes.io/projected/edd1fc1c-30e8-4e38-890a-0ee566ce2c26-kube-api-access-6xqd2\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.309785 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.322605 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.336776 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:33 crc kubenswrapper[4869]: E0130 11:14:33.338049 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-log" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.338070 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-log" Jan 30 11:14:33 crc kubenswrapper[4869]: E0130 11:14:33.338113 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-metadata" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.338120 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-metadata" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.339236 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-log" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.339297 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" containerName="nova-metadata-metadata" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.342836 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.349214 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.350162 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.365973 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.448349 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89k9w\" (UniqueName: \"kubernetes.io/projected/68531f31-f570-4a6a-9861-f882f3361a82-kube-api-access-89k9w\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.448473 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68531f31-f570-4a6a-9861-f882f3361a82-logs\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.448516 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.448547 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-config-data\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.448584 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.550316 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68531f31-f570-4a6a-9861-f882f3361a82-logs\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.550383 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.550416 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-config-data\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc 
kubenswrapper[4869]: I0130 11:14:33.550456 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.550499 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89k9w\" (UniqueName: \"kubernetes.io/projected/68531f31-f570-4a6a-9861-f882f3361a82-kube-api-access-89k9w\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.551353 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68531f31-f570-4a6a-9861-f882f3361a82-logs\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.565845 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.567104 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-config-data\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.567466 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.568207 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89k9w\" (UniqueName: \"kubernetes.io/projected/68531f31-f570-4a6a-9861-f882f3361a82-kube-api-access-89k9w\") pod \"nova-metadata-0\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " pod="openstack/nova-metadata-0" Jan 30 11:14:33 crc kubenswrapper[4869]: I0130 11:14:33.675442 4869 util.go:30] "No sandbox for pod can be found. 
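[editor's note] The mount side for the replacement pod follows the mirror-image order: "VerifyControllerAttachedVolume started", then "MountVolume started", then "MountVolume.SetUp succeeded" for each of logs, combined-ca-bundle, config-data, nova-metadata-tls-certs, and the kube-api-access token. A sketch of that two-step ordering, with an illustrative mounter interface (not the kubelet's real one); for empty-dir, secret, and projected volumes the attach check is effectively a no-op:

```go
package volreconcile

import "fmt"

type mounter interface {
	VerifyAttached(vol string) error // no-op for empty-dir/secret/projected
	SetUp(vol string) error          // materializes the volume for the pod
}

// mountAll verifies attachment before setting up each volume, matching
// the VerifyControllerAttachedVolume -> MountVolume.SetUp order above.
func mountAll(m mounter, vols []string) error {
	for _, v := range vols {
		if err := m.VerifyAttached(v); err != nil {
			return fmt.Errorf("verify %s: %w", v, err)
		}
		if err := m.SetUp(v); err != nil {
			return fmt.Errorf("setup %s: %w", v, err)
		}
		fmt.Printf("MountVolume.SetUp succeeded for %q\n", v)
	}
	return nil
}
```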
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:34 crc kubenswrapper[4869]: I0130 11:14:34.144574 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edd1fc1c-30e8-4e38-890a-0ee566ce2c26" path="/var/lib/kubelet/pods/edd1fc1c-30e8-4e38-890a-0ee566ce2c26/volumes" Jan 30 11:14:34 crc kubenswrapper[4869]: I0130 11:14:34.166317 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:34 crc kubenswrapper[4869]: W0130 11:14:34.169662 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68531f31_f570_4a6a_9861_f882f3361a82.slice/crio-24e825cd19729dfff4a349227cea23bfc77744a88065ef7dc507e287de5292d6 WatchSource:0}: Error finding container 24e825cd19729dfff4a349227cea23bfc77744a88065ef7dc507e287de5292d6: Status 404 returned error can't find the container with id 24e825cd19729dfff4a349227cea23bfc77744a88065ef7dc507e287de5292d6 Jan 30 11:14:35 crc kubenswrapper[4869]: I0130 11:14:35.001772 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"68531f31-f570-4a6a-9861-f882f3361a82","Type":"ContainerStarted","Data":"61dfdd24acf7e167a39f845676df5ecbdb188b0110ad4a279700ef536a09e9b8"} Jan 30 11:14:35 crc kubenswrapper[4869]: I0130 11:14:35.002597 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"68531f31-f570-4a6a-9861-f882f3361a82","Type":"ContainerStarted","Data":"61a9b1170740efeb4180a9bb446dd17d8d72bd2f3e700743067df4e8b54a99fd"} Jan 30 11:14:35 crc kubenswrapper[4869]: I0130 11:14:35.002688 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"68531f31-f570-4a6a-9861-f882f3361a82","Type":"ContainerStarted","Data":"24e825cd19729dfff4a349227cea23bfc77744a88065ef7dc507e287de5292d6"} Jan 30 11:14:35 crc kubenswrapper[4869]: I0130 11:14:35.025891 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.025868995 podStartE2EDuration="2.025868995s" podCreationTimestamp="2026-01-30 11:14:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:35.017645852 +0000 UTC m=+1225.567522278" watchObservedRunningTime="2026-01-30 11:14:35.025868995 +0000 UTC m=+1225.575745071" Jan 30 11:14:36 crc kubenswrapper[4869]: I0130 11:14:36.012891 4869 generic.go:334] "Generic (PLEG): container finished" podID="48c5632d-6e67-4014-9360-c8932146c432" containerID="e62e2bc94f8e1379e29cdd03d3712ba5ca06e49d50685909a4a864d05f0bc5c7" exitCode=0 Jan 30 11:14:36 crc kubenswrapper[4869]: I0130 11:14:36.012984 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vwj7x" event={"ID":"48c5632d-6e67-4014-9360-c8932146c432","Type":"ContainerDied","Data":"e62e2bc94f8e1379e29cdd03d3712ba5ca06e49d50685909a4a864d05f0bc5c7"} Jan 30 11:14:36 crc kubenswrapper[4869]: I0130 11:14:36.966289 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.027665 4869 generic.go:334] "Generic (PLEG): container finished" podID="42a310a9-b061-4d7a-9644-5f1303fc5c15" containerID="e54d60216b7cd4ff9bf216e525d3bff77639b9e99c69e0a57242e96c6750aee3" exitCode=0 Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.027858 4869 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" event={"ID":"42a310a9-b061-4d7a-9644-5f1303fc5c15","Type":"ContainerDied","Data":"e54d60216b7cd4ff9bf216e525d3bff77639b9e99c69e0a57242e96c6750aee3"} Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.249793 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.250120 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.426776 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vwj7x" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.457622 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.511214 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.526870 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-gxkk6"] Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.527515 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" podUID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerName="dnsmasq-dns" containerID="cri-o://ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814" gracePeriod=10 Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.530456 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-config-data\") pod \"48c5632d-6e67-4014-9360-c8932146c432\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.530606 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9px5f\" (UniqueName: \"kubernetes.io/projected/48c5632d-6e67-4014-9360-c8932146c432-kube-api-access-9px5f\") pod \"48c5632d-6e67-4014-9360-c8932146c432\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.530692 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-combined-ca-bundle\") pod \"48c5632d-6e67-4014-9360-c8932146c432\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.530968 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-scripts\") pod \"48c5632d-6e67-4014-9360-c8932146c432\" (UID: \"48c5632d-6e67-4014-9360-c8932146c432\") " Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.545469 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48c5632d-6e67-4014-9360-c8932146c432-kube-api-access-9px5f" (OuterVolumeSpecName: "kube-api-access-9px5f") pod "48c5632d-6e67-4014-9360-c8932146c432" (UID: "48c5632d-6e67-4014-9360-c8932146c432"). InnerVolumeSpecName "kube-api-access-9px5f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.551833 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-scripts" (OuterVolumeSpecName: "scripts") pod "48c5632d-6e67-4014-9360-c8932146c432" (UID: "48c5632d-6e67-4014-9360-c8932146c432"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.574082 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.590810 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48c5632d-6e67-4014-9360-c8932146c432" (UID: "48c5632d-6e67-4014-9360-c8932146c432"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.634266 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.634294 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9px5f\" (UniqueName: \"kubernetes.io/projected/48c5632d-6e67-4014-9360-c8932146c432-kube-api-access-9px5f\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.634305 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.652008 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-config-data" (OuterVolumeSpecName: "config-data") pod "48c5632d-6e67-4014-9360-c8932146c432" (UID: "48c5632d-6e67-4014-9360-c8932146c432"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:37 crc kubenswrapper[4869]: I0130 11:14:37.736074 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c5632d-6e67-4014-9360-c8932146c432-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.027022 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.044167 4869 generic.go:334] "Generic (PLEG): container finished" podID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerID="ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814" exitCode=0 Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.044269 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" event={"ID":"c07567e1-764d-4544-8a1d-ae6826672ae1","Type":"ContainerDied","Data":"ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814"} Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.044308 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" event={"ID":"c07567e1-764d-4544-8a1d-ae6826672ae1","Type":"ContainerDied","Data":"33a22f4df738bdd79eb6fb93de8e8f205945c14ae368919b68f268597ecda891"} Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.044330 4869 scope.go:117] "RemoveContainer" containerID="ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.044486 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-gxkk6" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.056777 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vwj7x" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.058054 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vwj7x" event={"ID":"48c5632d-6e67-4014-9360-c8932146c432","Type":"ContainerDied","Data":"f5c272d93448ba143eb3431f9f90a84d975d3aa0673e755959c906a30344e2e1"} Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.058125 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5c272d93448ba143eb3431f9f90a84d975d3aa0673e755959c906a30344e2e1" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.094232 4869 scope.go:117] "RemoveContainer" containerID="de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.117659 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.143410 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqzdh\" (UniqueName: \"kubernetes.io/projected/c07567e1-764d-4544-8a1d-ae6826672ae1-kube-api-access-xqzdh\") pod \"c07567e1-764d-4544-8a1d-ae6826672ae1\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.143553 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-swift-storage-0\") pod \"c07567e1-764d-4544-8a1d-ae6826672ae1\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.143593 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-svc\") pod \"c07567e1-764d-4544-8a1d-ae6826672ae1\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.143662 4869 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-config\") pod \"c07567e1-764d-4544-8a1d-ae6826672ae1\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.143721 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb\") pod \"c07567e1-764d-4544-8a1d-ae6826672ae1\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.143748 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-sb\") pod \"c07567e1-764d-4544-8a1d-ae6826672ae1\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.182766 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c07567e1-764d-4544-8a1d-ae6826672ae1-kube-api-access-xqzdh" (OuterVolumeSpecName: "kube-api-access-xqzdh") pod "c07567e1-764d-4544-8a1d-ae6826672ae1" (UID: "c07567e1-764d-4544-8a1d-ae6826672ae1"). InnerVolumeSpecName "kube-api-access-xqzdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.186071 4869 scope.go:117] "RemoveContainer" containerID="ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814" Jan 30 11:14:38 crc kubenswrapper[4869]: E0130 11:14:38.187932 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814\": container with ID starting with ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814 not found: ID does not exist" containerID="ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.187995 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814"} err="failed to get container status \"ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814\": rpc error: code = NotFound desc = could not find container \"ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814\": container with ID starting with ee129ebc9616d6e4b06ec77c365e76fc8306fc182d048ddb5f5c047976036814 not found: ID does not exist" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.188031 4869 scope.go:117] "RemoveContainer" containerID="de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375" Jan 30 11:14:38 crc kubenswrapper[4869]: E0130 11:14:38.189628 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375\": container with ID starting with de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375 not found: ID does not exist" containerID="de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.189700 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375"} 
err="failed to get container status \"de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375\": rpc error: code = NotFound desc = could not find container \"de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375\": container with ID starting with de9f0ff315708328e77fc504f37cfb6fe20e3555902d2d578bc2f35abc90c375 not found: ID does not exist" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.246111 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.246406 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-log" containerID="cri-o://0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a" gracePeriod=30 Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.247020 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-api" containerID="cri-o://15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0" gracePeriod=30 Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.253649 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.253888 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": EOF (Client.Timeout exceeded while awaiting headers)" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.256117 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqzdh\" (UniqueName: \"kubernetes.io/projected/c07567e1-764d-4544-8a1d-ae6826672ae1-kube-api-access-xqzdh\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.263815 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.264111 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-log" containerID="cri-o://61a9b1170740efeb4180a9bb446dd17d8d72bd2f3e700743067df4e8b54a99fd" gracePeriod=30 Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.264803 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-metadata" containerID="cri-o://61dfdd24acf7e167a39f845676df5ecbdb188b0110ad4a279700ef536a09e9b8" gracePeriod=30 Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.288425 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c07567e1-764d-4544-8a1d-ae6826672ae1" (UID: "c07567e1-764d-4544-8a1d-ae6826672ae1"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.310649 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c07567e1-764d-4544-8a1d-ae6826672ae1" (UID: "c07567e1-764d-4544-8a1d-ae6826672ae1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.312467 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c07567e1-764d-4544-8a1d-ae6826672ae1" (UID: "c07567e1-764d-4544-8a1d-ae6826672ae1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.327326 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-config" (OuterVolumeSpecName: "config") pod "c07567e1-764d-4544-8a1d-ae6826672ae1" (UID: "c07567e1-764d-4544-8a1d-ae6826672ae1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.358473 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c07567e1-764d-4544-8a1d-ae6826672ae1" (UID: "c07567e1-764d-4544-8a1d-ae6826672ae1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.358681 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb\") pod \"c07567e1-764d-4544-8a1d-ae6826672ae1\" (UID: \"c07567e1-764d-4544-8a1d-ae6826672ae1\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.359268 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.359282 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.359291 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.359301 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: W0130 11:14:38.359472 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/c07567e1-764d-4544-8a1d-ae6826672ae1/volumes/kubernetes.io~configmap/ovsdbserver-nb Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.359484 4869 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c07567e1-764d-4544-8a1d-ae6826672ae1" (UID: "c07567e1-764d-4544-8a1d-ae6826672ae1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.461525 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c07567e1-764d-4544-8a1d-ae6826672ae1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.549864 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.664110 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-config-data\") pod \"42a310a9-b061-4d7a-9644-5f1303fc5c15\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.664188 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-combined-ca-bundle\") pod \"42a310a9-b061-4d7a-9644-5f1303fc5c15\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.664221 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bx8q6\" (UniqueName: \"kubernetes.io/projected/42a310a9-b061-4d7a-9644-5f1303fc5c15-kube-api-access-bx8q6\") pod \"42a310a9-b061-4d7a-9644-5f1303fc5c15\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.664303 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-scripts\") pod \"42a310a9-b061-4d7a-9644-5f1303fc5c15\" (UID: \"42a310a9-b061-4d7a-9644-5f1303fc5c15\") " Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.675427 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-scripts" (OuterVolumeSpecName: "scripts") pod "42a310a9-b061-4d7a-9644-5f1303fc5c15" (UID: "42a310a9-b061-4d7a-9644-5f1303fc5c15"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.675518 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.675590 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.675825 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42a310a9-b061-4d7a-9644-5f1303fc5c15-kube-api-access-bx8q6" (OuterVolumeSpecName: "kube-api-access-bx8q6") pod "42a310a9-b061-4d7a-9644-5f1303fc5c15" (UID: "42a310a9-b061-4d7a-9644-5f1303fc5c15"). InnerVolumeSpecName "kube-api-access-bx8q6". 
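[editor's note] The empty_dir.go warning above ("Unmount skipped because path does not exist") is the same idempotency idea applied to teardown: if the mount path is already gone there is nothing to unmount, and the operation still counts as succeeded, which is why the subsequent TearDown line reports success. A sketch of that guard, assuming a caller-supplied unmount function (the real check lives in the kubelet's mount utilities):

```go
package volreconcile

import (
	"errors"
	"fmt"
	"os"
)

// unmountIfPresent skips the unmount when the path is already gone,
// treating absence as success, as the empty_dir warning does.
func unmountIfPresent(path string, unmount func(string) error) error {
	if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) {
		fmt.Printf("unmount skipped, %s does not exist\n", path)
		return nil
	}
	return unmount(path)
}
```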
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.686983 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-gxkk6"] Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.698517 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-gxkk6"] Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.709269 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "42a310a9-b061-4d7a-9644-5f1303fc5c15" (UID: "42a310a9-b061-4d7a-9644-5f1303fc5c15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.746594 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-config-data" (OuterVolumeSpecName: "config-data") pod "42a310a9-b061-4d7a-9644-5f1303fc5c15" (UID: "42a310a9-b061-4d7a-9644-5f1303fc5c15"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.768596 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.768634 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.768647 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bx8q6\" (UniqueName: \"kubernetes.io/projected/42a310a9-b061-4d7a-9644-5f1303fc5c15-kube-api-access-bx8q6\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.768658 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42a310a9-b061-4d7a-9644-5f1303fc5c15-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:38 crc kubenswrapper[4869]: I0130 11:14:38.794342 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.069661 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" event={"ID":"42a310a9-b061-4d7a-9644-5f1303fc5c15","Type":"ContainerDied","Data":"4e7bb0b2d6b96c8f1c84872ee4b069d9cc40f863801dacda19be1c19a91aaa75"} Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.069995 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e7bb0b2d6b96c8f1c84872ee4b069d9cc40f863801dacda19be1c19a91aaa75" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.070282 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s8qnc" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.089727 4869 generic.go:334] "Generic (PLEG): container finished" podID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerID="0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a" exitCode=143 Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.089794 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ccf24997-0c26-4cf8-87d3-52791cd4680c","Type":"ContainerDied","Data":"0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a"} Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.106223 4869 generic.go:334] "Generic (PLEG): container finished" podID="68531f31-f570-4a6a-9861-f882f3361a82" containerID="61dfdd24acf7e167a39f845676df5ecbdb188b0110ad4a279700ef536a09e9b8" exitCode=0 Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.106267 4869 generic.go:334] "Generic (PLEG): container finished" podID="68531f31-f570-4a6a-9861-f882f3361a82" containerID="61a9b1170740efeb4180a9bb446dd17d8d72bd2f3e700743067df4e8b54a99fd" exitCode=143 Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.107210 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"68531f31-f570-4a6a-9861-f882f3361a82","Type":"ContainerDied","Data":"61dfdd24acf7e167a39f845676df5ecbdb188b0110ad4a279700ef536a09e9b8"} Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.107248 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"68531f31-f570-4a6a-9861-f882f3361a82","Type":"ContainerDied","Data":"61a9b1170740efeb4180a9bb446dd17d8d72bd2f3e700743067df4e8b54a99fd"} Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.153283 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.155571 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 11:14:39 crc kubenswrapper[4869]: E0130 11:14:39.156066 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-metadata" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156091 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-metadata" Jan 30 11:14:39 crc kubenswrapper[4869]: E0130 11:14:39.156103 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerName="init" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156111 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerName="init" Jan 30 11:14:39 crc kubenswrapper[4869]: E0130 11:14:39.156133 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-log" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156174 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-log" Jan 30 11:14:39 crc kubenswrapper[4869]: E0130 11:14:39.156193 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerName="dnsmasq-dns" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156200 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerName="dnsmasq-dns" Jan 30 11:14:39 crc kubenswrapper[4869]: E0130 11:14:39.156221 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42a310a9-b061-4d7a-9644-5f1303fc5c15" containerName="nova-cell1-conductor-db-sync" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156230 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="42a310a9-b061-4d7a-9644-5f1303fc5c15" containerName="nova-cell1-conductor-db-sync" Jan 30 11:14:39 crc kubenswrapper[4869]: E0130 11:14:39.156246 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c5632d-6e67-4014-9360-c8932146c432" containerName="nova-manage" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156254 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c5632d-6e67-4014-9360-c8932146c432" containerName="nova-manage" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156465 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-log" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156502 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="68531f31-f570-4a6a-9861-f882f3361a82" containerName="nova-metadata-metadata" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156524 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="48c5632d-6e67-4014-9360-c8932146c432" containerName="nova-manage" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156539 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="42a310a9-b061-4d7a-9644-5f1303fc5c15" containerName="nova-cell1-conductor-db-sync" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.156546 4869 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c07567e1-764d-4544-8a1d-ae6826672ae1" containerName="dnsmasq-dns" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.157309 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.163090 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.188967 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.288998 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-nova-metadata-tls-certs\") pod \"68531f31-f570-4a6a-9861-f882f3361a82\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289170 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89k9w\" (UniqueName: \"kubernetes.io/projected/68531f31-f570-4a6a-9861-f882f3361a82-kube-api-access-89k9w\") pod \"68531f31-f570-4a6a-9861-f882f3361a82\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289253 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-config-data\") pod \"68531f31-f570-4a6a-9861-f882f3361a82\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289277 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68531f31-f570-4a6a-9861-f882f3361a82-logs\") pod \"68531f31-f570-4a6a-9861-f882f3361a82\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289326 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-combined-ca-bundle\") pod \"68531f31-f570-4a6a-9861-f882f3361a82\" (UID: \"68531f31-f570-4a6a-9861-f882f3361a82\") " Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289575 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68531f31-f570-4a6a-9861-f882f3361a82-logs" (OuterVolumeSpecName: "logs") pod "68531f31-f570-4a6a-9861-f882f3361a82" (UID: "68531f31-f570-4a6a-9861-f882f3361a82"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289605 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289670 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289858 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pvvr\" (UniqueName: \"kubernetes.io/projected/439024e7-e7a3-42c4-b9a1-db6705ec33d2-kube-api-access-9pvvr\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.289929 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68531f31-f570-4a6a-9861-f882f3361a82-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.302284 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68531f31-f570-4a6a-9861-f882f3361a82-kube-api-access-89k9w" (OuterVolumeSpecName: "kube-api-access-89k9w") pod "68531f31-f570-4a6a-9861-f882f3361a82" (UID: "68531f31-f570-4a6a-9861-f882f3361a82"). InnerVolumeSpecName "kube-api-access-89k9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.316641 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-config-data" (OuterVolumeSpecName: "config-data") pod "68531f31-f570-4a6a-9861-f882f3361a82" (UID: "68531f31-f570-4a6a-9861-f882f3361a82"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.330808 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68531f31-f570-4a6a-9861-f882f3361a82" (UID: "68531f31-f570-4a6a-9861-f882f3361a82"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.340645 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "68531f31-f570-4a6a-9861-f882f3361a82" (UID: "68531f31-f570-4a6a-9861-f882f3361a82"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.391558 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pvvr\" (UniqueName: \"kubernetes.io/projected/439024e7-e7a3-42c4-b9a1-db6705ec33d2-kube-api-access-9pvvr\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.391644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.391702 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.391815 4869 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.391826 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89k9w\" (UniqueName: \"kubernetes.io/projected/68531f31-f570-4a6a-9861-f882f3361a82-kube-api-access-89k9w\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.391836 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.391845 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68531f31-f570-4a6a-9861-f882f3361a82-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.398586 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.398619 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.413552 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pvvr\" (UniqueName: \"kubernetes.io/projected/439024e7-e7a3-42c4-b9a1-db6705ec33d2-kube-api-access-9pvvr\") pod \"nova-cell1-conductor-0\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.484883 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:39 crc kubenswrapper[4869]: I0130 11:14:39.965333 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 11:14:39 crc kubenswrapper[4869]: W0130 11:14:39.969526 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod439024e7_e7a3_42c4_b9a1_db6705ec33d2.slice/crio-07bad36b9322f0b213ac681c1a99f42909f82f43825f8ddba17d3188127b9cb7 WatchSource:0}: Error finding container 07bad36b9322f0b213ac681c1a99f42909f82f43825f8ddba17d3188127b9cb7: Status 404 returned error can't find the container with id 07bad36b9322f0b213ac681c1a99f42909f82f43825f8ddba17d3188127b9cb7 Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.122524 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.122514 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"68531f31-f570-4a6a-9861-f882f3361a82","Type":"ContainerDied","Data":"24e825cd19729dfff4a349227cea23bfc77744a88065ef7dc507e287de5292d6"} Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.122948 4869 scope.go:117] "RemoveContainer" containerID="61dfdd24acf7e167a39f845676df5ecbdb188b0110ad4a279700ef536a09e9b8" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.127459 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"439024e7-e7a3-42c4-b9a1-db6705ec33d2","Type":"ContainerStarted","Data":"07bad36b9322f0b213ac681c1a99f42909f82f43825f8ddba17d3188127b9cb7"} Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.128469 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="388d68d4-4b57-458f-9d18-3989cee16c04" containerName="nova-scheduler-scheduler" containerID="cri-o://58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc" gracePeriod=30 Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.148909 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c07567e1-764d-4544-8a1d-ae6826672ae1" path="/var/lib/kubelet/pods/c07567e1-764d-4544-8a1d-ae6826672ae1/volumes" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.166579 4869 scope.go:117] "RemoveContainer" containerID="61a9b1170740efeb4180a9bb446dd17d8d72bd2f3e700743067df4e8b54a99fd" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.184894 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.205762 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.218888 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.220616 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.225876 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.226540 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.233249 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.307883 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9a5962-e1b8-48ff-86c2-6464d47c9077-logs\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.308008 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.308042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.308201 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-config-data\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.308362 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz6hq\" (UniqueName: \"kubernetes.io/projected/5a9a5962-e1b8-48ff-86c2-6464d47c9077-kube-api-access-jz6hq\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.410101 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz6hq\" (UniqueName: \"kubernetes.io/projected/5a9a5962-e1b8-48ff-86c2-6464d47c9077-kube-api-access-jz6hq\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.410204 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9a5962-e1b8-48ff-86c2-6464d47c9077-logs\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.410241 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " 
pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.410263 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.410344 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-config-data\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.410617 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9a5962-e1b8-48ff-86c2-6464d47c9077-logs\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.416377 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.416644 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-config-data\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.416415 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.435193 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz6hq\" (UniqueName: \"kubernetes.io/projected/5a9a5962-e1b8-48ff-86c2-6464d47c9077-kube-api-access-jz6hq\") pod \"nova-metadata-0\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") " pod="openstack/nova-metadata-0" Jan 30 11:14:40 crc kubenswrapper[4869]: I0130 11:14:40.539042 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:14:41 crc kubenswrapper[4869]: W0130 11:14:41.025443 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a9a5962_e1b8_48ff_86c2_6464d47c9077.slice/crio-ce24b4b1421eaa1e0d0ada5a189b288b441e12fe2b45c10297ffc339a65da91c WatchSource:0}: Error finding container ce24b4b1421eaa1e0d0ada5a189b288b441e12fe2b45c10297ffc339a65da91c: Status 404 returned error can't find the container with id ce24b4b1421eaa1e0d0ada5a189b288b441e12fe2b45c10297ffc339a65da91c Jan 30 11:14:41 crc kubenswrapper[4869]: I0130 11:14:41.044506 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:14:41 crc kubenswrapper[4869]: I0130 11:14:41.137822 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"439024e7-e7a3-42c4-b9a1-db6705ec33d2","Type":"ContainerStarted","Data":"f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5"} Jan 30 11:14:41 crc kubenswrapper[4869]: I0130 11:14:41.137931 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:41 crc kubenswrapper[4869]: I0130 11:14:41.142152 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a9a5962-e1b8-48ff-86c2-6464d47c9077","Type":"ContainerStarted","Data":"ce24b4b1421eaa1e0d0ada5a189b288b441e12fe2b45c10297ffc339a65da91c"} Jan 30 11:14:41 crc kubenswrapper[4869]: I0130 11:14:41.154503 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.154477689 podStartE2EDuration="2.154477689s" podCreationTimestamp="2026-01-30 11:14:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:41.152830533 +0000 UTC m=+1231.702706599" watchObservedRunningTime="2026-01-30 11:14:41.154477689 +0000 UTC m=+1231.704353755" Jan 30 11:14:41 crc kubenswrapper[4869]: I0130 11:14:41.906911 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:14:41 crc kubenswrapper[4869]: I0130 11:14:41.907414 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="836183b5-4755-4622-a1da-438a1ec0b119" containerName="kube-state-metrics" containerID="cri-o://c93c7651e489d8173d3b52e5b5ca98b94c7f94435736b599f71054e104d9bb0f" gracePeriod=30 Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.161885 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68531f31-f570-4a6a-9861-f882f3361a82" path="/var/lib/kubelet/pods/68531f31-f570-4a6a-9861-f882f3361a82/volumes" Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.164597 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a9a5962-e1b8-48ff-86c2-6464d47c9077","Type":"ContainerStarted","Data":"335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588"} Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.164633 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a9a5962-e1b8-48ff-86c2-6464d47c9077","Type":"ContainerStarted","Data":"845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b"} Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.189702 4869 generic.go:334] 
"Generic (PLEG): container finished" podID="836183b5-4755-4622-a1da-438a1ec0b119" containerID="c93c7651e489d8173d3b52e5b5ca98b94c7f94435736b599f71054e104d9bb0f" exitCode=2 Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.190581 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"836183b5-4755-4622-a1da-438a1ec0b119","Type":"ContainerDied","Data":"c93c7651e489d8173d3b52e5b5ca98b94c7f94435736b599f71054e104d9bb0f"} Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.196593 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.196569347 podStartE2EDuration="2.196569347s" podCreationTimestamp="2026-01-30 11:14:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:42.188500148 +0000 UTC m=+1232.738376214" watchObservedRunningTime="2026-01-30 11:14:42.196569347 +0000 UTC m=+1232.746445413" Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.488608 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:14:42 crc kubenswrapper[4869]: E0130 11:14:42.513951 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 11:14:42 crc kubenswrapper[4869]: E0130 11:14:42.518668 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 11:14:42 crc kubenswrapper[4869]: E0130 11:14:42.520199 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 11:14:42 crc kubenswrapper[4869]: E0130 11:14:42.520272 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="388d68d4-4b57-458f-9d18-3989cee16c04" containerName="nova-scheduler-scheduler" Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.550837 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r5zt\" (UniqueName: \"kubernetes.io/projected/836183b5-4755-4622-a1da-438a1ec0b119-kube-api-access-2r5zt\") pod \"836183b5-4755-4622-a1da-438a1ec0b119\" (UID: \"836183b5-4755-4622-a1da-438a1ec0b119\") " Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.573892 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/836183b5-4755-4622-a1da-438a1ec0b119-kube-api-access-2r5zt" (OuterVolumeSpecName: "kube-api-access-2r5zt") pod "836183b5-4755-4622-a1da-438a1ec0b119" (UID: "836183b5-4755-4622-a1da-438a1ec0b119"). InnerVolumeSpecName "kube-api-access-2r5zt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:42 crc kubenswrapper[4869]: I0130 11:14:42.653494 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r5zt\" (UniqueName: \"kubernetes.io/projected/836183b5-4755-4622-a1da-438a1ec0b119-kube-api-access-2r5zt\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.230910 4869 generic.go:334] "Generic (PLEG): container finished" podID="388d68d4-4b57-458f-9d18-3989cee16c04" containerID="58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc" exitCode=0 Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.231190 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"388d68d4-4b57-458f-9d18-3989cee16c04","Type":"ContainerDied","Data":"58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc"} Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.239558 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"836183b5-4755-4622-a1da-438a1ec0b119","Type":"ContainerDied","Data":"5bad06863deb49dca58a971e426eae705a40348a0c51708739e6a7e0f391e34d"} Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.239603 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.239638 4869 scope.go:117] "RemoveContainer" containerID="c93c7651e489d8173d3b52e5b5ca98b94c7f94435736b599f71054e104d9bb0f" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.290778 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.339916 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.379902 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:14:43 crc kubenswrapper[4869]: E0130 11:14:43.380377 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836183b5-4755-4622-a1da-438a1ec0b119" containerName="kube-state-metrics" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.380390 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="836183b5-4755-4622-a1da-438a1ec0b119" containerName="kube-state-metrics" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.380592 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="836183b5-4755-4622-a1da-438a1ec0b119" containerName="kube-state-metrics" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.381268 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.389024 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.389277 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.410181 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.471459 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.471547 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pqpj\" (UniqueName: \"kubernetes.io/projected/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-api-access-4pqpj\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.471602 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.471862 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.574684 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.574796 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.574855 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pqpj\" (UniqueName: \"kubernetes.io/projected/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-api-access-4pqpj\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.574897 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.580997 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.581282 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.581076 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.592496 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pqpj\" (UniqueName: \"kubernetes.io/projected/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-api-access-4pqpj\") pod \"kube-state-metrics-0\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.726376 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.816123 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.879202 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhdzv\" (UniqueName: \"kubernetes.io/projected/388d68d4-4b57-458f-9d18-3989cee16c04-kube-api-access-bhdzv\") pod \"388d68d4-4b57-458f-9d18-3989cee16c04\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.879817 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-combined-ca-bundle\") pod \"388d68d4-4b57-458f-9d18-3989cee16c04\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.879887 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-config-data\") pod \"388d68d4-4b57-458f-9d18-3989cee16c04\" (UID: \"388d68d4-4b57-458f-9d18-3989cee16c04\") " Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.889573 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/388d68d4-4b57-458f-9d18-3989cee16c04-kube-api-access-bhdzv" (OuterVolumeSpecName: "kube-api-access-bhdzv") pod "388d68d4-4b57-458f-9d18-3989cee16c04" (UID: "388d68d4-4b57-458f-9d18-3989cee16c04"). 
InnerVolumeSpecName "kube-api-access-bhdzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.912892 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "388d68d4-4b57-458f-9d18-3989cee16c04" (UID: "388d68d4-4b57-458f-9d18-3989cee16c04"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.916973 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-config-data" (OuterVolumeSpecName: "config-data") pod "388d68d4-4b57-458f-9d18-3989cee16c04" (UID: "388d68d4-4b57-458f-9d18-3989cee16c04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.992117 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.992152 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388d68d4-4b57-458f-9d18-3989cee16c04-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:43 crc kubenswrapper[4869]: I0130 11:14:43.992161 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhdzv\" (UniqueName: \"kubernetes.io/projected/388d68d4-4b57-458f-9d18-3989cee16c04-kube-api-access-bhdzv\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.128362 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.155229 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="836183b5-4755-4622-a1da-438a1ec0b119" path="/var/lib/kubelet/pods/836183b5-4755-4622-a1da-438a1ec0b119/volumes" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.196416 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-combined-ca-bundle\") pod \"ccf24997-0c26-4cf8-87d3-52791cd4680c\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.196906 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbbkm\" (UniqueName: \"kubernetes.io/projected/ccf24997-0c26-4cf8-87d3-52791cd4680c-kube-api-access-rbbkm\") pod \"ccf24997-0c26-4cf8-87d3-52791cd4680c\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.197060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccf24997-0c26-4cf8-87d3-52791cd4680c-logs\") pod \"ccf24997-0c26-4cf8-87d3-52791cd4680c\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.197119 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-config-data\") pod \"ccf24997-0c26-4cf8-87d3-52791cd4680c\" (UID: \"ccf24997-0c26-4cf8-87d3-52791cd4680c\") " Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.197996 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccf24997-0c26-4cf8-87d3-52791cd4680c-logs" (OuterVolumeSpecName: "logs") pod "ccf24997-0c26-4cf8-87d3-52791cd4680c" (UID: "ccf24997-0c26-4cf8-87d3-52791cd4680c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.198284 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccf24997-0c26-4cf8-87d3-52791cd4680c-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.219248 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccf24997-0c26-4cf8-87d3-52791cd4680c-kube-api-access-rbbkm" (OuterVolumeSpecName: "kube-api-access-rbbkm") pod "ccf24997-0c26-4cf8-87d3-52791cd4680c" (UID: "ccf24997-0c26-4cf8-87d3-52791cd4680c"). InnerVolumeSpecName "kube-api-access-rbbkm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.225376 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-config-data" (OuterVolumeSpecName: "config-data") pod "ccf24997-0c26-4cf8-87d3-52791cd4680c" (UID: "ccf24997-0c26-4cf8-87d3-52791cd4680c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.237656 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccf24997-0c26-4cf8-87d3-52791cd4680c" (UID: "ccf24997-0c26-4cf8-87d3-52791cd4680c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.251148 4869 generic.go:334] "Generic (PLEG): container finished" podID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerID="15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0" exitCode=0 Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.251198 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ccf24997-0c26-4cf8-87d3-52791cd4680c","Type":"ContainerDied","Data":"15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0"} Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.251222 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ccf24997-0c26-4cf8-87d3-52791cd4680c","Type":"ContainerDied","Data":"569cce4c001bb6a35c526a9a5d4a35b1606c4591ba9511c8b0116baec1ea45d6"} Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.251242 4869 scope.go:117] "RemoveContainer" containerID="15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.251362 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.261998 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"388d68d4-4b57-458f-9d18-3989cee16c04","Type":"ContainerDied","Data":"e2afd93a8474edb4ac1795792a3d8a122f825fc5637d34bc98fa11a43795a1ef"} Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.262104 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.301288 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.301321 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf24997-0c26-4cf8-87d3-52791cd4680c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.301336 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbbkm\" (UniqueName: \"kubernetes.io/projected/ccf24997-0c26-4cf8-87d3-52791cd4680c-kube-api-access-rbbkm\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.303239 4869 scope.go:117] "RemoveContainer" containerID="0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.306381 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.306680 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-central-agent" containerID="cri-o://3813d207347665549df9186414444bfd8577be70c90451254fc2a551674a5fb5" gracePeriod=30 Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.306849 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="proxy-httpd" containerID="cri-o://3dc5aeba2c8fa1c20ec7a1d82a90b0d78b32bc2141d3a2eea87b0b28d89aec83" gracePeriod=30 Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.306952 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="sg-core" containerID="cri-o://f6e1c7449026dcdea8b601bec90363c3293832f4ab8a7ef8b2ddf2504f46b659" gracePeriod=30 Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.306996 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-notification-agent" containerID="cri-o://6fec61e9e5af194ad8d5c4caa520af3d1ae34a68f458cceeb429d1ac0596cd82" gracePeriod=30 Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.344759 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.348118 4869 scope.go:117] "RemoveContainer" containerID="15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0" Jan 30 11:14:44 crc kubenswrapper[4869]: E0130 11:14:44.348647 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0\": container with ID starting with 15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0 not found: ID does not exist" containerID="15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.348686 4869 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0"} err="failed to get container status \"15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0\": rpc error: code = NotFound desc = could not find container \"15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0\": container with ID starting with 15db9f6f47698b70c7db8e9ec729397d3b5c759b029d6f8e6eb706c1440d07d0 not found: ID does not exist" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.348731 4869 scope.go:117] "RemoveContainer" containerID="0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a" Jan 30 11:14:44 crc kubenswrapper[4869]: E0130 11:14:44.348958 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a\": container with ID starting with 0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a not found: ID does not exist" containerID="0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.348981 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a"} err="failed to get container status \"0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a\": rpc error: code = NotFound desc = could not find container \"0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a\": container with ID starting with 0701b921b2c37e6ac9dd14d3829c98dbbd030317b11925ef9fee336b1d92202a not found: ID does not exist" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.349024 4869 scope.go:117] "RemoveContainer" containerID="58a729881fc9600375367caf6611a5616f9b34b25643a47c139c3ee3463519cc" Jan 30 11:14:44 crc kubenswrapper[4869]: W0130 11:14:44.356309 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd098b42f_f300_4308_93b0_fe2af785ce4c.slice/crio-a88ac13c5eaf94c8b1980d72f0808839522a46283692e4ed1e72cc48d45196c4 WatchSource:0}: Error finding container a88ac13c5eaf94c8b1980d72f0808839522a46283692e4ed1e72cc48d45196c4: Status 404 returned error can't find the container with id a88ac13c5eaf94c8b1980d72f0808839522a46283692e4ed1e72cc48d45196c4 Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.370535 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.390200 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.398502 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.406189 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.413619 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: E0130 11:14:44.414034 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-api" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.414046 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" 
containerName="nova-api-api" Jan 30 11:14:44 crc kubenswrapper[4869]: E0130 11:14:44.414057 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-log" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.414063 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-log" Jan 30 11:14:44 crc kubenswrapper[4869]: E0130 11:14:44.414078 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="388d68d4-4b57-458f-9d18-3989cee16c04" containerName="nova-scheduler-scheduler" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.414083 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="388d68d4-4b57-458f-9d18-3989cee16c04" containerName="nova-scheduler-scheduler" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.414255 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-log" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.414267 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" containerName="nova-api-api" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.414285 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="388d68d4-4b57-458f-9d18-3989cee16c04" containerName="nova-scheduler-scheduler" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.414893 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.418088 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.426456 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.441927 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.443866 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.447956 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.449878 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.504653 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.504800 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-config-data\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.504833 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jnvq\" (UniqueName: \"kubernetes.io/projected/a01e50c7-dd24-4042-a0d9-58a62a6c946f-kube-api-access-2jnvq\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.607458 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-logs\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.607506 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.607539 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-config-data\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.607560 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jnvq\" (UniqueName: \"kubernetes.io/projected/a01e50c7-dd24-4042-a0d9-58a62a6c946f-kube-api-access-2jnvq\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.607646 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg9ff\" (UniqueName: \"kubernetes.io/projected/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-kube-api-access-pg9ff\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.607671 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.607735 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-config-data\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.614184 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.614237 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-config-data\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.627639 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jnvq\" (UniqueName: \"kubernetes.io/projected/a01e50c7-dd24-4042-a0d9-58a62a6c946f-kube-api-access-2jnvq\") pod \"nova-scheduler-0\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.709202 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg9ff\" (UniqueName: \"kubernetes.io/projected/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-kube-api-access-pg9ff\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.709290 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-config-data\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.709328 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-logs\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.709361 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.711394 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-logs\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.714199 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.714375 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-config-data\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.729877 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg9ff\" (UniqueName: \"kubernetes.io/projected/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-kube-api-access-pg9ff\") pod \"nova-api-0\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " pod="openstack/nova-api-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.730873 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:14:44 crc kubenswrapper[4869]: I0130 11:14:44.774041 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.230838 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.275072 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d098b42f-f300-4308-93b0-fe2af785ce4c","Type":"ContainerStarted","Data":"b9690da5434b6d4146f8eab01da1057397213e30a8e072731befa7042dbba543"} Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.275128 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d098b42f-f300-4308-93b0-fe2af785ce4c","Type":"ContainerStarted","Data":"a88ac13c5eaf94c8b1980d72f0808839522a46283692e4ed1e72cc48d45196c4"} Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.276441 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 30 11:14:45 crc kubenswrapper[4869]: W0130 11:14:45.279884 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda01e50c7_dd24_4042_a0d9_58a62a6c946f.slice/crio-aca136a6f6adfb6530797a2d3b2bb8a047dab95a24798eed4dca95d03e23a5c2 WatchSource:0}: Error finding container aca136a6f6adfb6530797a2d3b2bb8a047dab95a24798eed4dca95d03e23a5c2: Status 404 returned error can't find the container with id aca136a6f6adfb6530797a2d3b2bb8a047dab95a24798eed4dca95d03e23a5c2 Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281346 4869 generic.go:334] "Generic (PLEG): container finished" podID="a96afda1-0656-4607-b671-65ace8da5d7b" containerID="3dc5aeba2c8fa1c20ec7a1d82a90b0d78b32bc2141d3a2eea87b0b28d89aec83" exitCode=0 Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281378 4869 generic.go:334] "Generic (PLEG): container finished" podID="a96afda1-0656-4607-b671-65ace8da5d7b" containerID="f6e1c7449026dcdea8b601bec90363c3293832f4ab8a7ef8b2ddf2504f46b659" exitCode=2 Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281387 4869 generic.go:334] "Generic (PLEG): container finished" podID="a96afda1-0656-4607-b671-65ace8da5d7b" containerID="6fec61e9e5af194ad8d5c4caa520af3d1ae34a68f458cceeb429d1ac0596cd82" exitCode=0 Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281396 4869 generic.go:334] "Generic 
(PLEG): container finished" podID="a96afda1-0656-4607-b671-65ace8da5d7b" containerID="3813d207347665549df9186414444bfd8577be70c90451254fc2a551674a5fb5" exitCode=0 Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281434 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerDied","Data":"3dc5aeba2c8fa1c20ec7a1d82a90b0d78b32bc2141d3a2eea87b0b28d89aec83"} Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281457 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerDied","Data":"f6e1c7449026dcdea8b601bec90363c3293832f4ab8a7ef8b2ddf2504f46b659"} Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281470 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerDied","Data":"6fec61e9e5af194ad8d5c4caa520af3d1ae34a68f458cceeb429d1ac0596cd82"} Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.281483 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerDied","Data":"3813d207347665549df9186414444bfd8577be70c90451254fc2a551674a5fb5"} Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.304678 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.896299681 podStartE2EDuration="2.304655861s" podCreationTimestamp="2026-01-30 11:14:43 +0000 UTC" firstStartedPulling="2026-01-30 11:14:44.379809812 +0000 UTC m=+1234.929685878" lastFinishedPulling="2026-01-30 11:14:44.788165992 +0000 UTC m=+1235.338042058" observedRunningTime="2026-01-30 11:14:45.297824577 +0000 UTC m=+1235.847700643" watchObservedRunningTime="2026-01-30 11:14:45.304655861 +0000 UTC m=+1235.854531927" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.328344 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:14:45 crc kubenswrapper[4869]: W0130 11:14:45.344034 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fcdb9d5_e409_4d89_a7a9_dc2e2ee022d3.slice/crio-aee265a703f18e42c34543fccfdfa2a744744a861664d42c5a3a33985e9ec71b WatchSource:0}: Error finding container aee265a703f18e42c34543fccfdfa2a744744a861664d42c5a3a33985e9ec71b: Status 404 returned error can't find the container with id aee265a703f18e42c34543fccfdfa2a744744a861664d42c5a3a33985e9ec71b Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.539805 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.539928 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.687397 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.844303 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-combined-ca-bundle\") pod \"a96afda1-0656-4607-b671-65ace8da5d7b\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.844607 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-sg-core-conf-yaml\") pod \"a96afda1-0656-4607-b671-65ace8da5d7b\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.844845 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-run-httpd\") pod \"a96afda1-0656-4607-b671-65ace8da5d7b\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.844900 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m592z\" (UniqueName: \"kubernetes.io/projected/a96afda1-0656-4607-b671-65ace8da5d7b-kube-api-access-m592z\") pod \"a96afda1-0656-4607-b671-65ace8da5d7b\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.844930 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-scripts\") pod \"a96afda1-0656-4607-b671-65ace8da5d7b\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.844951 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-log-httpd\") pod \"a96afda1-0656-4607-b671-65ace8da5d7b\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.844982 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-config-data\") pod \"a96afda1-0656-4607-b671-65ace8da5d7b\" (UID: \"a96afda1-0656-4607-b671-65ace8da5d7b\") " Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.845170 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a96afda1-0656-4607-b671-65ace8da5d7b" (UID: "a96afda1-0656-4607-b671-65ace8da5d7b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.845483 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a96afda1-0656-4607-b671-65ace8da5d7b" (UID: "a96afda1-0656-4607-b671-65ace8da5d7b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.845929 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.845953 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a96afda1-0656-4607-b671-65ace8da5d7b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.851113 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-scripts" (OuterVolumeSpecName: "scripts") pod "a96afda1-0656-4607-b671-65ace8da5d7b" (UID: "a96afda1-0656-4607-b671-65ace8da5d7b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.866949 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a96afda1-0656-4607-b671-65ace8da5d7b-kube-api-access-m592z" (OuterVolumeSpecName: "kube-api-access-m592z") pod "a96afda1-0656-4607-b671-65ace8da5d7b" (UID: "a96afda1-0656-4607-b671-65ace8da5d7b"). InnerVolumeSpecName "kube-api-access-m592z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.900291 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a96afda1-0656-4607-b671-65ace8da5d7b" (UID: "a96afda1-0656-4607-b671-65ace8da5d7b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.943852 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a96afda1-0656-4607-b671-65ace8da5d7b" (UID: "a96afda1-0656-4607-b671-65ace8da5d7b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.949492 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m592z\" (UniqueName: \"kubernetes.io/projected/a96afda1-0656-4607-b671-65ace8da5d7b-kube-api-access-m592z\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.949528 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.949579 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.949592 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:45 crc kubenswrapper[4869]: I0130 11:14:45.959786 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-config-data" (OuterVolumeSpecName: "config-data") pod "a96afda1-0656-4607-b671-65ace8da5d7b" (UID: "a96afda1-0656-4607-b671-65ace8da5d7b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.051607 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96afda1-0656-4607-b671-65ace8da5d7b-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.147473 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="388d68d4-4b57-458f-9d18-3989cee16c04" path="/var/lib/kubelet/pods/388d68d4-4b57-458f-9d18-3989cee16c04/volumes" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.148158 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccf24997-0c26-4cf8-87d3-52791cd4680c" path="/var/lib/kubelet/pods/ccf24997-0c26-4cf8-87d3-52791cd4680c/volumes" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.328755 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a01e50c7-dd24-4042-a0d9-58a62a6c946f","Type":"ContainerStarted","Data":"ceb876878c7482e1d74a2c9adf2c683331ab53a5dc082b5cf08e6f3b6628c4c0"} Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.329079 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a01e50c7-dd24-4042-a0d9-58a62a6c946f","Type":"ContainerStarted","Data":"aca136a6f6adfb6530797a2d3b2bb8a047dab95a24798eed4dca95d03e23a5c2"} Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.331166 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3","Type":"ContainerStarted","Data":"3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb"} Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.331204 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3","Type":"ContainerStarted","Data":"6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4"} Jan 30 11:14:46 crc 
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.331219 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3","Type":"ContainerStarted","Data":"aee265a703f18e42c34543fccfdfa2a744744a861664d42c5a3a33985e9ec71b"}
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.339050 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.339274 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a96afda1-0656-4607-b671-65ace8da5d7b","Type":"ContainerDied","Data":"ea6484236fce984114e62329c0dd223951465e62335a846210bcf924cf505152"}
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.339357 4869 scope.go:117] "RemoveContainer" containerID="3dc5aeba2c8fa1c20ec7a1d82a90b0d78b32bc2141d3a2eea87b0b28d89aec83"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.371502 4869 scope.go:117] "RemoveContainer" containerID="f6e1c7449026dcdea8b601bec90363c3293832f4ab8a7ef8b2ddf2504f46b659"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.392597 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.392576499 podStartE2EDuration="2.392576499s" podCreationTimestamp="2026-01-30 11:14:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:46.374539397 +0000 UTC m=+1236.924415483" watchObservedRunningTime="2026-01-30 11:14:46.392576499 +0000 UTC m=+1236.942452565"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.394794 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.394778002 podStartE2EDuration="2.394778002s" podCreationTimestamp="2026-01-30 11:14:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:14:46.354122648 +0000 UTC m=+1236.903998734" watchObservedRunningTime="2026-01-30 11:14:46.394778002 +0000 UTC m=+1236.944654068"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.407920 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.418933 4869 scope.go:117] "RemoveContainer" containerID="6fec61e9e5af194ad8d5c4caa520af3d1ae34a68f458cceeb429d1ac0596cd82"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.419102 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.426490 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:14:46 crc kubenswrapper[4869]: E0130 11:14:46.426929 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-central-agent"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.426943 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-central-agent"
Jan 30 11:14:46 crc kubenswrapper[4869]: E0130 11:14:46.426971 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-notification-agent"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.426977 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-notification-agent"
Jan 30 11:14:46 crc kubenswrapper[4869]: E0130 11:14:46.426995 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="sg-core"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.427002 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="sg-core"
Jan 30 11:14:46 crc kubenswrapper[4869]: E0130 11:14:46.427008 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="proxy-httpd"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.427013 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="proxy-httpd"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.427186 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="sg-core"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.427198 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-notification-agent"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.427215 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="ceilometer-central-agent"
Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.427230 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" containerName="proxy-httpd"
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.432564 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.433139 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.433355 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.440504 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.477445 4869 scope.go:117] "RemoveContainer" containerID="3813d207347665549df9186414444bfd8577be70c90451254fc2a551674a5fb5" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.561526 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-config-data\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.561636 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.561671 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.561891 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-log-httpd\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.562019 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-run-httpd\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.562093 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-scripts\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.562372 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.562411 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rs2l\" (UniqueName: \"kubernetes.io/projected/7d2c473b-6395-43d0-98ed-a6ba38195d91-kube-api-access-6rs2l\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665107 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665191 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665486 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-log-httpd\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665550 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-run-httpd\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665584 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-scripts\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665663 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665699 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rs2l\" (UniqueName: \"kubernetes.io/projected/7d2c473b-6395-43d0-98ed-a6ba38195d91-kube-api-access-6rs2l\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.665785 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-config-data\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.666603 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-run-httpd\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 
11:14:46.666862 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-log-httpd\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.672367 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-config-data\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.672516 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-scripts\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.672668 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.673207 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.679650 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.685483 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rs2l\" (UniqueName: \"kubernetes.io/projected/7d2c473b-6395-43d0-98ed-a6ba38195d91-kube-api-access-6rs2l\") pod \"ceilometer-0\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " pod="openstack/ceilometer-0" Jan 30 11:14:46 crc kubenswrapper[4869]: I0130 11:14:46.767974 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:14:47 crc kubenswrapper[4869]: I0130 11:14:47.273424 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:14:47 crc kubenswrapper[4869]: W0130 11:14:47.279879 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d2c473b_6395_43d0_98ed_a6ba38195d91.slice/crio-f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7 WatchSource:0}: Error finding container f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7: Status 404 returned error can't find the container with id f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7 Jan 30 11:14:47 crc kubenswrapper[4869]: I0130 11:14:47.348411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerStarted","Data":"f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7"} Jan 30 11:14:48 crc kubenswrapper[4869]: I0130 11:14:48.208358 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a96afda1-0656-4607-b671-65ace8da5d7b" path="/var/lib/kubelet/pods/a96afda1-0656-4607-b671-65ace8da5d7b/volumes" Jan 30 11:14:48 crc kubenswrapper[4869]: I0130 11:14:48.360585 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerStarted","Data":"5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa"} Jan 30 11:14:49 crc kubenswrapper[4869]: I0130 11:14:49.374690 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerStarted","Data":"ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3"} Jan 30 11:14:49 crc kubenswrapper[4869]: I0130 11:14:49.512935 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 30 11:14:49 crc kubenswrapper[4869]: I0130 11:14:49.731019 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 30 11:14:50 crc kubenswrapper[4869]: I0130 11:14:50.385782 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerStarted","Data":"a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6"} Jan 30 11:14:50 crc kubenswrapper[4869]: I0130 11:14:50.540077 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 11:14:50 crc kubenswrapper[4869]: I0130 11:14:50.540144 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 11:14:51 crc kubenswrapper[4869]: I0130 11:14:51.397458 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerStarted","Data":"9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc"} Jan 30 11:14:51 crc kubenswrapper[4869]: I0130 11:14:51.398028 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 11:14:51 crc kubenswrapper[4869]: I0130 11:14:51.554058 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" 
containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 11:14:51 crc kubenswrapper[4869]: I0130 11:14:51.554143 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 11:14:51 crc kubenswrapper[4869]: I0130 11:14:51.769917 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:14:51 crc kubenswrapper[4869]: I0130 11:14:51.769989 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:14:53 crc kubenswrapper[4869]: I0130 11:14:53.742955 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 30 11:14:53 crc kubenswrapper[4869]: I0130 11:14:53.767823 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.214608488 podStartE2EDuration="7.767799766s" podCreationTimestamp="2026-01-30 11:14:46 +0000 UTC" firstStartedPulling="2026-01-30 11:14:47.282642692 +0000 UTC m=+1237.832518748" lastFinishedPulling="2026-01-30 11:14:50.83583396 +0000 UTC m=+1241.385710026" observedRunningTime="2026-01-30 11:14:51.433239805 +0000 UTC m=+1241.983115881" watchObservedRunningTime="2026-01-30 11:14:53.767799766 +0000 UTC m=+1244.317675832" Jan 30 11:14:54 crc kubenswrapper[4869]: I0130 11:14:54.732784 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 30 11:14:54 crc kubenswrapper[4869]: I0130 11:14:54.760433 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 30 11:14:54 crc kubenswrapper[4869]: I0130 11:14:54.776268 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 11:14:54 crc kubenswrapper[4869]: I0130 11:14:54.776333 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 11:14:55 crc kubenswrapper[4869]: I0130 11:14:55.460210 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 30 11:14:55 crc kubenswrapper[4869]: I0130 11:14:55.856938 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 11:14:55 crc kubenswrapper[4869]: I0130 11:14:55.857008 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-api" probeResult="failure" output="Get 
\"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.146694 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c"] Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.148733 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.150477 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.160925 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.164402 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c"] Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.234140 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-config-volume\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.234543 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-secret-volume\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.234590 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgwmk\" (UniqueName: \"kubernetes.io/projected/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-kube-api-access-lgwmk\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.337421 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-config-volume\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.337474 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-secret-volume\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.337507 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgwmk\" (UniqueName: \"kubernetes.io/projected/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-kube-api-access-lgwmk\") pod 
\"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.338482 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-config-volume\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.357817 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-secret-volume\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.365052 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgwmk\" (UniqueName: \"kubernetes.io/projected/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-kube-api-access-lgwmk\") pod \"collect-profiles-29496195-7ln2c\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.478209 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.550691 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.551257 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.556913 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 30 11:15:00 crc kubenswrapper[4869]: I0130 11:15:00.977151 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c"] Jan 30 11:15:01 crc kubenswrapper[4869]: I0130 11:15:01.478552 4869 generic.go:334] "Generic (PLEG): container finished" podID="e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" containerID="e728007b89aeb0f94800a4be49a30a3fe81b587263865831d35850b63d9849f0" exitCode=0 Jan 30 11:15:01 crc kubenswrapper[4869]: I0130 11:15:01.478635 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" event={"ID":"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b","Type":"ContainerDied","Data":"e728007b89aeb0f94800a4be49a30a3fe81b587263865831d35850b63d9849f0"} Jan 30 11:15:01 crc kubenswrapper[4869]: I0130 11:15:01.479314 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" event={"ID":"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b","Type":"ContainerStarted","Data":"de52e94dbfb918e12538ba122e3289c8766e97d7634a54ff22cb3f0d4ba9429e"} Jan 30 11:15:01 crc kubenswrapper[4869]: I0130 11:15:01.485818 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.393493 4869 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.475213 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-config-data\") pod \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.475336 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-combined-ca-bundle\") pod \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.475373 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bldth\" (UniqueName: \"kubernetes.io/projected/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-kube-api-access-bldth\") pod \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\" (UID: \"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18\") " Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.481961 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-kube-api-access-bldth" (OuterVolumeSpecName: "kube-api-access-bldth") pod "8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" (UID: "8de6ec04-fd9e-4901-a3f8-39a0d71e9d18"). InnerVolumeSpecName "kube-api-access-bldth". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.490488 4869 generic.go:334] "Generic (PLEG): container finished" podID="8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" containerID="ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5" exitCode=137 Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.490660 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.491602 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18","Type":"ContainerDied","Data":"ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5"} Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.491635 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8de6ec04-fd9e-4901-a3f8-39a0d71e9d18","Type":"ContainerDied","Data":"2e61d7139895c560297e90f35607f1588b9a2984dd509eb1384aea974307a074"} Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.491653 4869 scope.go:117] "RemoveContainer" containerID="ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.503784 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-config-data" (OuterVolumeSpecName: "config-data") pod "8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" (UID: "8de6ec04-fd9e-4901-a3f8-39a0d71e9d18"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.509073 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" (UID: "8de6ec04-fd9e-4901-a3f8-39a0d71e9d18"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.577903 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.577947 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bldth\" (UniqueName: \"kubernetes.io/projected/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-kube-api-access-bldth\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.577963 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.586850 4869 scope.go:117] "RemoveContainer" containerID="ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5" Jan 30 11:15:02 crc kubenswrapper[4869]: E0130 11:15:02.587259 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5\": container with ID starting with ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5 not found: ID does not exist" containerID="ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.587304 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5"} err="failed to get container status \"ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5\": rpc error: code = NotFound desc = could not find container \"ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5\": container with ID starting with ce6cd42b5b4b6d06c455c6e010b86b0d5777baacce1dfa088fa570a20f7516b5 not found: ID does not exist" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.868346 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.873757 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.884574 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-config-volume\") pod \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.884734 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-secret-volume\") pod \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.885141 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgwmk\" (UniqueName: \"kubernetes.io/projected/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-kube-api-access-lgwmk\") pod \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\" (UID: \"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b\") " Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.886823 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-config-volume" (OuterVolumeSpecName: "config-volume") pod "e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" (UID: "e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.889654 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.891423 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-kube-api-access-lgwmk" (OuterVolumeSpecName: "kube-api-access-lgwmk") pod "e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" (UID: "e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b"). InnerVolumeSpecName "kube-api-access-lgwmk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.917605 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:15:02 crc kubenswrapper[4869]: E0130 11:15:02.918342 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.918359 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 11:15:02 crc kubenswrapper[4869]: E0130 11:15:02.918383 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" containerName="collect-profiles" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.918391 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" containerName="collect-profiles" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.918579 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" containerName="collect-profiles" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.918600 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.919365 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.917651 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" (UID: "e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.922252 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.922660 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.923031 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.929675 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.987662 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5qws\" (UniqueName: \"kubernetes.io/projected/f8f9cd63-d585-4053-b25b-3c0947f43755-kube-api-access-f5qws\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.987853 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.987909 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.988026 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.988124 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.988193 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.988205 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:02 crc kubenswrapper[4869]: I0130 11:15:02.988215 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgwmk\" (UniqueName: \"kubernetes.io/projected/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b-kube-api-access-lgwmk\") 
on node \"crc\" DevicePath \"\"" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.090276 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.090360 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.090402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.090440 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5qws\" (UniqueName: \"kubernetes.io/projected/f8f9cd63-d585-4053-b25b-3c0947f43755-kube-api-access-f5qws\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.090517 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.095881 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.096349 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.097340 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.097630 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.108664 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5qws\" (UniqueName: \"kubernetes.io/projected/f8f9cd63-d585-4053-b25b-3c0947f43755-kube-api-access-f5qws\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.262192 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.503640 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c" event={"ID":"e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b","Type":"ContainerDied","Data":"de52e94dbfb918e12538ba122e3289c8766e97d7634a54ff22cb3f0d4ba9429e"}
Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.503901 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de52e94dbfb918e12538ba122e3289c8766e97d7634a54ff22cb3f0d4ba9429e"
Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.503700 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c"
Jan 30 11:15:03 crc kubenswrapper[4869]: I0130 11:15:03.727239 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.143290 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8de6ec04-fd9e-4901-a3f8-39a0d71e9d18" path="/var/lib/kubelet/pods/8de6ec04-fd9e-4901-a3f8-39a0d71e9d18/volumes"
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.515568 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8f9cd63-d585-4053-b25b-3c0947f43755","Type":"ContainerStarted","Data":"80fd0e1a128b125455b0f3582efabeca4bc9e7c9682db967e54b192f2d8a8aa0"}
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.515615 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8f9cd63-d585-4053-b25b-3c0947f43755","Type":"ContainerStarted","Data":"43a7a6544d47d5d739aa8f18cf2928b603890be3121d118520247f97a26db8fe"}
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.550009 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.549968179 podStartE2EDuration="2.549968179s" podCreationTimestamp="2026-01-30 11:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:15:04.532520954 +0000 UTC m=+1255.082397050" watchObservedRunningTime="2026-01-30 11:15:04.549968179 +0000 UTC m=+1255.099844285"
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.779122 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.779791 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.780745 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 30 11:15:04 crc kubenswrapper[4869]: I0130 11:15:04.790560 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
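This window carries four "Observed pod startup duration" records (nova-api-0, nova-scheduler-0, ceilometer-0, and nova-cell1-novncproxy-0 above). podStartSLOduration excludes image-pull time, so it matches podStartE2EDuration exactly when the pull timestamps are the zero value "0001-01-01" (nothing had to be pulled) and is shorter otherwise, as with ceilometer-0 (4.21s vs 7.77s around a ~3.5s pull). A small extractor for these records (a sketch; field names as printed by pod_startup_latency_tracker.go):

    # Extract pod startup SLO records like the ones above from a log on stdin.
    import re
    import sys

    PAT = re.compile(
        r'"Observed pod startup duration" pod="([^"]+)" '
        r'podStartSLOduration=([\d.]+) podStartE2EDuration="([^"]+)"'
    )

    for line in sys.stdin:
        for pod, slo, e2e in PAT.findall(line):
            # SLO excludes image-pull time; E2E includes it, so the two match
            # when the pull timestamps are the zero value (no pull needed).
            print(f"{pod}: SLO {float(slo):.3f}s, end-to-end {e2e}")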
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.522875 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.526120 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.697842 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-r98zg"]
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.699614 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.724806 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-r98zg"]
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.847832 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.847911 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29lc9\" (UniqueName: \"kubernetes.io/projected/c446ec70-c06c-4301-987c-423882ca1469-kube-api-access-29lc9\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.847955 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.847973 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.848143 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-config\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.848240 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.950143 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID:
\"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.950236 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.950333 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29lc9\" (UniqueName: \"kubernetes.io/projected/c446ec70-c06c-4301-987c-423882ca1469-kube-api-access-29lc9\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.950380 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.950401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.950474 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-config\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.951477 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-config\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.952204 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.952751 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.953519 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: 
I0130 11:15:05.954063 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:05 crc kubenswrapper[4869]: I0130 11:15:05.975006 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29lc9\" (UniqueName: \"kubernetes.io/projected/c446ec70-c06c-4301-987c-423882ca1469-kube-api-access-29lc9\") pod \"dnsmasq-dns-cd5cbd7b9-r98zg\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:06 crc kubenswrapper[4869]: I0130 11:15:06.026269 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:06 crc kubenswrapper[4869]: I0130 11:15:06.559196 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-r98zg"] Jan 30 11:15:07 crc kubenswrapper[4869]: I0130 11:15:07.539572 4869 generic.go:334] "Generic (PLEG): container finished" podID="c446ec70-c06c-4301-987c-423882ca1469" containerID="96a5b1a816c46859aaf489cb9698ab2c6768310099b7bff27f10d01862c2418e" exitCode=0 Jan 30 11:15:07 crc kubenswrapper[4869]: I0130 11:15:07.539680 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" event={"ID":"c446ec70-c06c-4301-987c-423882ca1469","Type":"ContainerDied","Data":"96a5b1a816c46859aaf489cb9698ab2c6768310099b7bff27f10d01862c2418e"} Jan 30 11:15:07 crc kubenswrapper[4869]: I0130 11:15:07.540405 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" event={"ID":"c446ec70-c06c-4301-987c-423882ca1469","Type":"ContainerStarted","Data":"0e92cac8745e164845a2872a8982b9ab3d928d8d308092cb8e265a5afd40fc6c"} Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.262281 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.305912 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.306241 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="ceilometer-central-agent" containerID="cri-o://5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa" gracePeriod=30 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.306375 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="proxy-httpd" containerID="cri-o://9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc" gracePeriod=30 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.306373 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="sg-core" containerID="cri-o://a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6" gracePeriod=30 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.306373 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" 
containerName="ceilometer-notification-agent" containerID="cri-o://ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3" gracePeriod=30 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.320058 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.197:3000/\": read tcp 10.217.0.2:58326->10.217.0.197:3000: read: connection reset by peer" Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.468593 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.551159 4869 generic.go:334] "Generic (PLEG): container finished" podID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerID="9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc" exitCode=0 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.551202 4869 generic.go:334] "Generic (PLEG): container finished" podID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerID="a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6" exitCode=2 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.551249 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerDied","Data":"9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc"} Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.551283 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerDied","Data":"a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6"} Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.553425 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-log" containerID="cri-o://6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4" gracePeriod=30 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.554629 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" event={"ID":"c446ec70-c06c-4301-987c-423882ca1469","Type":"ContainerStarted","Data":"ac0db009767c9d20a2e2fbda72d982d36dc79b4ff0b0a3d504db1abca9f191e3"} Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.554668 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.554904 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-api" containerID="cri-o://3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb" gracePeriod=30 Jan 30 11:15:08 crc kubenswrapper[4869]: I0130 11:15:08.607740 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" podStartSLOduration=3.607688487 podStartE2EDuration="3.607688487s" podCreationTimestamp="2026-01-30 11:15:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:15:08.599213866 +0000 UTC m=+1259.149089932" watchObservedRunningTime="2026-01-30 11:15:08.607688487 +0000 UTC m=+1259.157564553" Jan 30 11:15:09 crc kubenswrapper[4869]: I0130 
11:15:09.565961 4869 generic.go:334] "Generic (PLEG): container finished" podID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerID="5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa" exitCode=0 Jan 30 11:15:09 crc kubenswrapper[4869]: I0130 11:15:09.566039 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerDied","Data":"5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa"} Jan 30 11:15:09 crc kubenswrapper[4869]: I0130 11:15:09.568001 4869 generic.go:334] "Generic (PLEG): container finished" podID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerID="6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4" exitCode=143 Jan 30 11:15:09 crc kubenswrapper[4869]: I0130 11:15:09.568054 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3","Type":"ContainerDied","Data":"6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4"} Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.267584 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457067 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-sg-core-conf-yaml\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457115 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-scripts\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457153 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-combined-ca-bundle\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457175 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-ceilometer-tls-certs\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457254 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-log-httpd\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457419 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-config-data\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457489 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rs2l\" (UniqueName: 
\"kubernetes.io/projected/7d2c473b-6395-43d0-98ed-a6ba38195d91-kube-api-access-6rs2l\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.457542 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-run-httpd\") pod \"7d2c473b-6395-43d0-98ed-a6ba38195d91\" (UID: \"7d2c473b-6395-43d0-98ed-a6ba38195d91\") " Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.458031 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.458170 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.463509 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d2c473b-6395-43d0-98ed-a6ba38195d91-kube-api-access-6rs2l" (OuterVolumeSpecName: "kube-api-access-6rs2l") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "kube-api-access-6rs2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.463548 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-scripts" (OuterVolumeSpecName: "scripts") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.488461 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.537155 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.556887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.560234 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.560262 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rs2l\" (UniqueName: \"kubernetes.io/projected/7d2c473b-6395-43d0-98ed-a6ba38195d91-kube-api-access-6rs2l\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.560276 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d2c473b-6395-43d0-98ed-a6ba38195d91-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.560286 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.560297 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.560319 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.560332 4869 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.574946 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-config-data" (OuterVolumeSpecName: "config-data") pod "7d2c473b-6395-43d0-98ed-a6ba38195d91" (UID: "7d2c473b-6395-43d0-98ed-a6ba38195d91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.604219 4869 generic.go:334] "Generic (PLEG): container finished" podID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerID="ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3" exitCode=0 Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.604270 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerDied","Data":"ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3"} Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.604306 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d2c473b-6395-43d0-98ed-a6ba38195d91","Type":"ContainerDied","Data":"f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7"} Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.604333 4869 scope.go:117] "RemoveContainer" containerID="9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.604502 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.624263 4869 scope.go:117] "RemoveContainer" containerID="a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.645075 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.650109 4869 scope.go:117] "RemoveContainer" containerID="ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.655483 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.662017 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2c473b-6395-43d0-98ed-a6ba38195d91-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.678266 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.678730 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="ceilometer-notification-agent" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.678754 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="ceilometer-notification-agent" Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.678777 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="ceilometer-central-agent" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.678785 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="ceilometer-central-agent" Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.678795 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="sg-core" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.678803 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="sg-core" Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.678835 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="proxy-httpd" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.678842 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="proxy-httpd" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.679066 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="proxy-httpd" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.679103 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="ceilometer-central-agent" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.679114 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="ceilometer-notification-agent" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.679152 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" containerName="sg-core" Jan 30 
11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.681334 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.684639 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.688913 4869 scope.go:117] "RemoveContainer" containerID="5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.689551 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.689808 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.691747 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.727579 4869 scope.go:117] "RemoveContainer" containerID="9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc" Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.728063 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc\": container with ID starting with 9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc not found: ID does not exist" containerID="9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.728187 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc"} err="failed to get container status \"9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc\": rpc error: code = NotFound desc = could not find container \"9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc\": container with ID starting with 9c4005181f73c5a0ca8893bf8e54c58be54cceeb9b507887eadb80cd71cae0bc not found: ID does not exist" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.728306 4869 scope.go:117] "RemoveContainer" containerID="a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6" Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.728736 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6\": container with ID starting with a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6 not found: ID does not exist" containerID="a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.728790 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6"} err="failed to get container status \"a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6\": rpc error: code = NotFound desc = could not find container \"a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6\": container with ID starting with a0ab4a9ef396cf5916cdfa7da8467bfc202253fd84272168c0d2d9da4b2770d6 not found: ID does not exist" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.728860 4869 scope.go:117] 
"RemoveContainer" containerID="ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3" Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.729267 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3\": container with ID starting with ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3 not found: ID does not exist" containerID="ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.729305 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3"} err="failed to get container status \"ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3\": rpc error: code = NotFound desc = could not find container \"ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3\": container with ID starting with ded3d056db1dc0adb7b3c4466ef7fa70e89362cb06f4df7f4e70b03cd8e422a3 not found: ID does not exist" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.729332 4869 scope.go:117] "RemoveContainer" containerID="5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa" Jan 30 11:15:10 crc kubenswrapper[4869]: E0130 11:15:10.729566 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa\": container with ID starting with 5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa not found: ID does not exist" containerID="5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.729631 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa"} err="failed to get container status \"5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa\": rpc error: code = NotFound desc = could not find container \"5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa\": container with ID starting with 5050c5a6d866ffb16dcb003916778b4d82b8d814508caa5571dd152f95be7aaa not found: ID does not exist" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.865100 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbt7l\" (UniqueName: \"kubernetes.io/projected/b94e67f1-cfa7-4470-96ad-440a78a7707e-kube-api-access-qbt7l\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.865161 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-run-httpd\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.865213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-log-httpd\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: 
I0130 11:15:10.867000 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-config-data\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.867236 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.867456 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.867612 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-scripts\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.867639 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.968909 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.969014 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-scripts\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.969039 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.969069 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbt7l\" (UniqueName: \"kubernetes.io/projected/b94e67f1-cfa7-4470-96ad-440a78a7707e-kube-api-access-qbt7l\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.969100 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-run-httpd\") pod \"ceilometer-0\" (UID: 
\"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.969138 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-log-httpd\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.969171 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-config-data\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.969260 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.970272 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-run-httpd\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.970337 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-log-httpd\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.974853 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.975540 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.975791 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.975914 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-config-data\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.976791 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-scripts\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 
11:15:10 crc kubenswrapper[4869]: I0130 11:15:10.993123 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbt7l\" (UniqueName: \"kubernetes.io/projected/b94e67f1-cfa7-4470-96ad-440a78a7707e-kube-api-access-qbt7l\") pod \"ceilometer-0\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " pod="openstack/ceilometer-0" Jan 30 11:15:11 crc kubenswrapper[4869]: I0130 11:15:11.015660 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:15:11 crc kubenswrapper[4869]: I0130 11:15:11.546352 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:15:11 crc kubenswrapper[4869]: I0130 11:15:11.625630 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerStarted","Data":"a40ff370ee160cb0dc495721894588c50dc0bcf1dc3537808526a9e3ba9be1b8"} Jan 30 11:15:11 crc kubenswrapper[4869]: E0130 11:15:11.769096 4869 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d2c473b_6395_43d0_98ed_a6ba38195d91.slice/crio-f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7: Error finding container f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7: Status 404 returned error can't find the container with id f304aef34cce5dd07ed2688d0b3b309f17c648f67f2d491eb57afcac80ef58a7 Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.145055 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d2c473b-6395-43d0-98ed-a6ba38195d91" path="/var/lib/kubelet/pods/7d2c473b-6395-43d0-98ed-a6ba38195d91/volumes" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.154613 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.327114 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-combined-ca-bundle\") pod \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.327190 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-config-data\") pod \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.327258 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-logs\") pod \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.327278 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg9ff\" (UniqueName: \"kubernetes.io/projected/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-kube-api-access-pg9ff\") pod \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\" (UID: \"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3\") " Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.331059 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-logs" (OuterVolumeSpecName: "logs") pod "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" (UID: "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.355728 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-kube-api-access-pg9ff" (OuterVolumeSpecName: "kube-api-access-pg9ff") pod "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" (UID: "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3"). InnerVolumeSpecName "kube-api-access-pg9ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.415793 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" (UID: "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.434007 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.434039 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.434051 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg9ff\" (UniqueName: \"kubernetes.io/projected/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-kube-api-access-pg9ff\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.445030 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-config-data" (OuterVolumeSpecName: "config-data") pod "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" (UID: "6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.536925 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.635102 4869 generic.go:334] "Generic (PLEG): container finished" podID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerID="3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb" exitCode=0 Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.635184 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3","Type":"ContainerDied","Data":"3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb"} Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.635217 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3","Type":"ContainerDied","Data":"aee265a703f18e42c34543fccfdfa2a744744a861664d42c5a3a33985e9ec71b"} Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.635185 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.635234 4869 scope.go:117] "RemoveContainer" containerID="3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.636225 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerStarted","Data":"c37c781ec6bbb85eee14167c524d88c4bc0c5851de4283fda9feb6b69d175421"} Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.654963 4869 scope.go:117] "RemoveContainer" containerID="6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.684917 4869 scope.go:117] "RemoveContainer" containerID="3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb" Jan 30 11:15:12 crc kubenswrapper[4869]: E0130 11:15:12.686037 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb\": container with ID starting with 3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb not found: ID does not exist" containerID="3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.686081 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb"} err="failed to get container status \"3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb\": rpc error: code = NotFound desc = could not find container \"3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb\": container with ID starting with 3c92642881efe9f5809f673b2d84052880941137df94eaee2b527de3860861fb not found: ID does not exist" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.686109 4869 scope.go:117] "RemoveContainer" containerID="6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.686215 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:12 crc kubenswrapper[4869]: E0130 11:15:12.686780 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4\": container with ID starting with 6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4 not found: ID does not exist" containerID="6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.686815 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4"} err="failed to get container status \"6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4\": rpc error: code = NotFound desc = could not find container \"6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4\": container with ID starting with 6dba192a329e77bd226b5fda5f266bb2509b0b42b57adc820b126e99973ffcf4 not found: ID does not exist" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.698531 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.710480 4869 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:12 crc kubenswrapper[4869]: E0130 11:15:12.711026 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-log" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.711049 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-log" Jan 30 11:15:12 crc kubenswrapper[4869]: E0130 11:15:12.711103 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-api" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.711112 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-api" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.711377 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-api" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.711405 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" containerName="nova-api-log" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.712818 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.717606 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.717651 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.717834 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.717923 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.841845 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.841928 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-config-data\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.842008 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dcpk\" (UniqueName: \"kubernetes.io/projected/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-kube-api-access-8dcpk\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.842039 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " 
pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.842077 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-public-tls-certs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.842141 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-logs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.944112 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dcpk\" (UniqueName: \"kubernetes.io/projected/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-kube-api-access-8dcpk\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.944502 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.944533 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-public-tls-certs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.944625 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-logs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.945113 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-logs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.945196 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.945250 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-config-data\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.949853 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-public-tls-certs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 
crc kubenswrapper[4869]: I0130 11:15:12.949896 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.949970 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-config-data\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.951821 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:12 crc kubenswrapper[4869]: I0130 11:15:12.964260 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dcpk\" (UniqueName: \"kubernetes.io/projected/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-kube-api-access-8dcpk\") pod \"nova-api-0\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " pod="openstack/nova-api-0" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.064952 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.263432 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.297958 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.561400 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:13 crc kubenswrapper[4869]: W0130 11:15:13.566468 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7693e7f6_379b_4d43_88be_9bdc6a5c0a2b.slice/crio-4157f83d6000741dcf3548a8d7a66b41e7bdcced1c76347853696fd9af879b61 WatchSource:0}: Error finding container 4157f83d6000741dcf3548a8d7a66b41e7bdcced1c76347853696fd9af879b61: Status 404 returned error can't find the container with id 4157f83d6000741dcf3548a8d7a66b41e7bdcced1c76347853696fd9af879b61 Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.649305 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerStarted","Data":"27b5ea189bd4822ad549497c90e0af47c89f4bcaf3a147f407784ea7d6d2c6c9"} Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.649365 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerStarted","Data":"2df8b1eba74a41c2b063c8fbcf197a06c9966c6a5ba096d492ead9e41ab7aa2e"} Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.653833 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b","Type":"ContainerStarted","Data":"4157f83d6000741dcf3548a8d7a66b41e7bdcced1c76347853696fd9af879b61"} Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.670522 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.921539 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-ngc5k"] Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.922789 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.926185 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.926402 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 30 11:15:13 crc kubenswrapper[4869]: I0130 11:15:13.933016 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-ngc5k"] Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.075981 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-scripts\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.076465 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6bw2\" (UniqueName: \"kubernetes.io/projected/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-kube-api-access-d6bw2\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.076598 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-config-data\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.076642 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.143650 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3" path="/var/lib/kubelet/pods/6fcdb9d5-e409-4d89-a7a9-dc2e2ee022d3/volumes" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.178575 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6bw2\" (UniqueName: \"kubernetes.io/projected/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-kube-api-access-d6bw2\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.178687 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-config-data\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: 
\"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.178729 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.178766 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-scripts\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.183852 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.184384 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-scripts\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.192852 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-config-data\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.201545 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6bw2\" (UniqueName: \"kubernetes.io/projected/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-kube-api-access-d6bw2\") pod \"nova-cell1-cell-mapping-ngc5k\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.318641 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.663481 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b","Type":"ContainerStarted","Data":"d22acd0e37c1d16b48642ba07101001e0b5da4e0b87c4c3ff79fd376c6a9907a"} Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.663834 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b","Type":"ContainerStarted","Data":"e4c4918f5662f8b540d7b476d1bcefa42040a6d2343b28c5333643103e016655"} Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.685745 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.685720515 podStartE2EDuration="2.685720515s" podCreationTimestamp="2026-01-30 11:15:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:15:14.682593077 +0000 UTC m=+1265.232469163" watchObservedRunningTime="2026-01-30 11:15:14.685720515 +0000 UTC m=+1265.235596581" Jan 30 11:15:14 crc kubenswrapper[4869]: I0130 11:15:14.782359 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-ngc5k"] Jan 30 11:15:14 crc kubenswrapper[4869]: W0130 11:15:14.791130 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod848bfccf_b69f_4c76_8ae1_2dd0e7203cb8.slice/crio-599ead84c05b5820c1b1afba44e69c1b0729405f5bbf59c6b0a74189f5200a66 WatchSource:0}: Error finding container 599ead84c05b5820c1b1afba44e69c1b0729405f5bbf59c6b0a74189f5200a66: Status 404 returned error can't find the container with id 599ead84c05b5820c1b1afba44e69c1b0729405f5bbf59c6b0a74189f5200a66 Jan 30 11:15:15 crc kubenswrapper[4869]: I0130 11:15:15.676823 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ngc5k" event={"ID":"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8","Type":"ContainerStarted","Data":"6f648066bc5ecfa8412c587c889d17f50834350c9a5cabdd2ecd078d9aba434c"} Jan 30 11:15:15 crc kubenswrapper[4869]: I0130 11:15:15.677258 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ngc5k" event={"ID":"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8","Type":"ContainerStarted","Data":"599ead84c05b5820c1b1afba44e69c1b0729405f5bbf59c6b0a74189f5200a66"} Jan 30 11:15:15 crc kubenswrapper[4869]: I0130 11:15:15.702213 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-ngc5k" podStartSLOduration=2.702193345 podStartE2EDuration="2.702193345s" podCreationTimestamp="2026-01-30 11:15:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:15:15.701267849 +0000 UTC m=+1266.251143935" watchObservedRunningTime="2026-01-30 11:15:15.702193345 +0000 UTC m=+1266.252069411" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.027950 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.091918 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-2jdxp"] Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.093125 4869 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" podUID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerName="dnsmasq-dns" containerID="cri-o://e85400675ab49c958a87fc830f35df0dee7d933272c59fcdb8ba4741cb390fd8" gracePeriod=10 Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.687658 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerID="e85400675ab49c958a87fc830f35df0dee7d933272c59fcdb8ba4741cb390fd8" exitCode=0 Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.687727 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" event={"ID":"5c574192-b2fc-42ad-980f-ca5d42f51ac7","Type":"ContainerDied","Data":"e85400675ab49c958a87fc830f35df0dee7d933272c59fcdb8ba4741cb390fd8"} Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.688113 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" event={"ID":"5c574192-b2fc-42ad-980f-ca5d42f51ac7","Type":"ContainerDied","Data":"dccec6ed2867a6c4ae2b0fd65764eb562f8d62dc3f17b068b29100504d861b20"} Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.688135 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dccec6ed2867a6c4ae2b0fd65764eb562f8d62dc3f17b068b29100504d861b20" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.692007 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerStarted","Data":"d6f67b2e1962982646c99be9310fe46368582436c0f28ba1d79bd9af395475fc"} Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.708776 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.744415 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.732027905 podStartE2EDuration="6.744396295s" podCreationTimestamp="2026-01-30 11:15:10 +0000 UTC" firstStartedPulling="2026-01-30 11:15:11.545778197 +0000 UTC m=+1262.095654263" lastFinishedPulling="2026-01-30 11:15:15.558146587 +0000 UTC m=+1266.108022653" observedRunningTime="2026-01-30 11:15:16.71641131 +0000 UTC m=+1267.266287406" watchObservedRunningTime="2026-01-30 11:15:16.744396295 +0000 UTC m=+1267.294272361" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.837538 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-nb\") pod \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.837630 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-svc\") pod \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.837716 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk8jz\" (UniqueName: \"kubernetes.io/projected/5c574192-b2fc-42ad-980f-ca5d42f51ac7-kube-api-access-sk8jz\") pod \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.837745 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-config\") pod \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.837809 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-swift-storage-0\") pod \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.837829 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-sb\") pod \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\" (UID: \"5c574192-b2fc-42ad-980f-ca5d42f51ac7\") " Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.846681 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c574192-b2fc-42ad-980f-ca5d42f51ac7-kube-api-access-sk8jz" (OuterVolumeSpecName: "kube-api-access-sk8jz") pod "5c574192-b2fc-42ad-980f-ca5d42f51ac7" (UID: "5c574192-b2fc-42ad-980f-ca5d42f51ac7"). InnerVolumeSpecName "kube-api-access-sk8jz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.895541 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c574192-b2fc-42ad-980f-ca5d42f51ac7" (UID: "5c574192-b2fc-42ad-980f-ca5d42f51ac7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.904057 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c574192-b2fc-42ad-980f-ca5d42f51ac7" (UID: "5c574192-b2fc-42ad-980f-ca5d42f51ac7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.904949 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-config" (OuterVolumeSpecName: "config") pod "5c574192-b2fc-42ad-980f-ca5d42f51ac7" (UID: "5c574192-b2fc-42ad-980f-ca5d42f51ac7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.923170 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c574192-b2fc-42ad-980f-ca5d42f51ac7" (UID: "5c574192-b2fc-42ad-980f-ca5d42f51ac7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.940203 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.940245 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.940259 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk8jz\" (UniqueName: \"kubernetes.io/projected/5c574192-b2fc-42ad-980f-ca5d42f51ac7-kube-api-access-sk8jz\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.940272 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.940283 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:16 crc kubenswrapper[4869]: I0130 11:15:16.945407 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5c574192-b2fc-42ad-980f-ca5d42f51ac7" (UID: "5c574192-b2fc-42ad-980f-ca5d42f51ac7"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:15:17 crc kubenswrapper[4869]: I0130 11:15:17.041481 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c574192-b2fc-42ad-980f-ca5d42f51ac7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:17 crc kubenswrapper[4869]: I0130 11:15:17.699258 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 11:15:17 crc kubenswrapper[4869]: I0130 11:15:17.699548 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-2jdxp" Jan 30 11:15:17 crc kubenswrapper[4869]: I0130 11:15:17.740818 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-2jdxp"] Jan 30 11:15:17 crc kubenswrapper[4869]: I0130 11:15:17.752839 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-2jdxp"] Jan 30 11:15:18 crc kubenswrapper[4869]: I0130 11:15:18.143998 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" path="/var/lib/kubelet/pods/5c574192-b2fc-42ad-980f-ca5d42f51ac7/volumes" Jan 30 11:15:20 crc kubenswrapper[4869]: I0130 11:15:20.723125 4869 generic.go:334] "Generic (PLEG): container finished" podID="848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" containerID="6f648066bc5ecfa8412c587c889d17f50834350c9a5cabdd2ecd078d9aba434c" exitCode=0 Jan 30 11:15:20 crc kubenswrapper[4869]: I0130 11:15:20.723444 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ngc5k" event={"ID":"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8","Type":"ContainerDied","Data":"6f648066bc5ecfa8412c587c889d17f50834350c9a5cabdd2ecd078d9aba434c"} Jan 30 11:15:21 crc kubenswrapper[4869]: I0130 11:15:21.769168 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:15:21 crc kubenswrapper[4869]: I0130 11:15:21.770429 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:15:21 crc kubenswrapper[4869]: I0130 11:15:21.770627 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:15:21 crc kubenswrapper[4869]: I0130 11:15:21.771967 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d30cb107250bb27f981051333540bce0f94a645c35535aa9330fd41a7dff2ba"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:15:21 crc kubenswrapper[4869]: I0130 11:15:21.772197 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" 
containerID="cri-o://2d30cb107250bb27f981051333540bce0f94a645c35535aa9330fd41a7dff2ba" gracePeriod=600 Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.125685 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.235070 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-combined-ca-bundle\") pod \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.235308 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-scripts\") pod \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.235364 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6bw2\" (UniqueName: \"kubernetes.io/projected/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-kube-api-access-d6bw2\") pod \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.235408 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-config-data\") pod \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\" (UID: \"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8\") " Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.241590 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-kube-api-access-d6bw2" (OuterVolumeSpecName: "kube-api-access-d6bw2") pod "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" (UID: "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8"). InnerVolumeSpecName "kube-api-access-d6bw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.244853 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-scripts" (OuterVolumeSpecName: "scripts") pod "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" (UID: "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.265159 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" (UID: "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.272772 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-config-data" (OuterVolumeSpecName: "config-data") pod "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" (UID: "848bfccf-b69f-4c76-8ae1-2dd0e7203cb8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.360249 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.360296 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.360317 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6bw2\" (UniqueName: \"kubernetes.io/projected/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-kube-api-access-d6bw2\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.360331 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.745868 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="2d30cb107250bb27f981051333540bce0f94a645c35535aa9330fd41a7dff2ba" exitCode=0 Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.746010 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"2d30cb107250bb27f981051333540bce0f94a645c35535aa9330fd41a7dff2ba"} Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.746495 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"cabcf877f44bd0de25c7db6df0dc9b22d4324a2881d9c97569218164d3a2997b"} Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.746521 4869 scope.go:117] "RemoveContainer" containerID="4ad08bd30e3f392ec90da3d9d390933399838d7e5f9e5bf41ffe126d804a3058" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.748812 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ngc5k" event={"ID":"848bfccf-b69f-4c76-8ae1-2dd0e7203cb8","Type":"ContainerDied","Data":"599ead84c05b5820c1b1afba44e69c1b0729405f5bbf59c6b0a74189f5200a66"} Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.748844 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="599ead84c05b5820c1b1afba44e69c1b0729405f5bbf59c6b0a74189f5200a66" Jan 30 11:15:22 crc kubenswrapper[4869]: I0130 11:15:22.748933 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ngc5k" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.008004 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.008668 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a01e50c7-dd24-4042-a0d9-58a62a6c946f" containerName="nova-scheduler-scheduler" containerID="cri-o://ceb876878c7482e1d74a2c9adf2c683331ab53a5dc082b5cf08e6f3b6628c4c0" gracePeriod=30 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.021677 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.022130 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-log" containerID="cri-o://e4c4918f5662f8b540d7b476d1bcefa42040a6d2343b28c5333643103e016655" gracePeriod=30 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.022210 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-api" containerID="cri-o://d22acd0e37c1d16b48642ba07101001e0b5da4e0b87c4c3ff79fd376c6a9907a" gracePeriod=30 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.033116 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.034519 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-log" containerID="cri-o://845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b" gracePeriod=30 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.034599 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-metadata" containerID="cri-o://335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588" gracePeriod=30 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.762151 4869 generic.go:334] "Generic (PLEG): container finished" podID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerID="d22acd0e37c1d16b48642ba07101001e0b5da4e0b87c4c3ff79fd376c6a9907a" exitCode=0 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.762574 4869 generic.go:334] "Generic (PLEG): container finished" podID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerID="e4c4918f5662f8b540d7b476d1bcefa42040a6d2343b28c5333643103e016655" exitCode=143 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.762299 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b","Type":"ContainerDied","Data":"d22acd0e37c1d16b48642ba07101001e0b5da4e0b87c4c3ff79fd376c6a9907a"} Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.762783 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b","Type":"ContainerDied","Data":"e4c4918f5662f8b540d7b476d1bcefa42040a6d2343b28c5333643103e016655"} Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.762851 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b","Type":"ContainerDied","Data":"4157f83d6000741dcf3548a8d7a66b41e7bdcced1c76347853696fd9af879b61"} Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.762911 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4157f83d6000741dcf3548a8d7a66b41e7bdcced1c76347853696fd9af879b61" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.766313 4869 generic.go:334] "Generic (PLEG): container finished" podID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerID="845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b" exitCode=143 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.766471 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a9a5962-e1b8-48ff-86c2-6464d47c9077","Type":"ContainerDied","Data":"845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b"} Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.768359 4869 generic.go:334] "Generic (PLEG): container finished" podID="a01e50c7-dd24-4042-a0d9-58a62a6c946f" containerID="ceb876878c7482e1d74a2c9adf2c683331ab53a5dc082b5cf08e6f3b6628c4c0" exitCode=0 Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.768394 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a01e50c7-dd24-4042-a0d9-58a62a6c946f","Type":"ContainerDied","Data":"ceb876878c7482e1d74a2c9adf2c683331ab53a5dc082b5cf08e6f3b6628c4c0"} Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.768651 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.902011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-public-tls-certs\") pod \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.902160 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-combined-ca-bundle\") pod \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.902268 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-config-data\") pod \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.902430 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-logs\") pod \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.902573 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dcpk\" (UniqueName: \"kubernetes.io/projected/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-kube-api-access-8dcpk\") pod \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.902678 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-internal-tls-certs\") pod \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\" (UID: \"7693e7f6-379b-4d43-88be-9bdc6a5c0a2b\") " Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.903164 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-logs" (OuterVolumeSpecName: "logs") pod "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" (UID: "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.904675 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.915225 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-kube-api-access-8dcpk" (OuterVolumeSpecName: "kube-api-access-8dcpk") pod "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" (UID: "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b"). InnerVolumeSpecName "kube-api-access-8dcpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.932733 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-config-data" (OuterVolumeSpecName: "config-data") pod "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" (UID: "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.936399 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" (UID: "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:23 crc kubenswrapper[4869]: I0130 11:15:23.984000 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" (UID: "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.005465 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" (UID: "7693e7f6-379b-4d43-88be-9bdc6a5c0a2b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.006564 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dcpk\" (UniqueName: \"kubernetes.io/projected/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-kube-api-access-8dcpk\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.006609 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.006626 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.006640 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.006653 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.058417 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.208932 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jnvq\" (UniqueName: \"kubernetes.io/projected/a01e50c7-dd24-4042-a0d9-58a62a6c946f-kube-api-access-2jnvq\") pod \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.208997 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-combined-ca-bundle\") pod \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.209139 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-config-data\") pod \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\" (UID: \"a01e50c7-dd24-4042-a0d9-58a62a6c946f\") " Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.215346 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a01e50c7-dd24-4042-a0d9-58a62a6c946f-kube-api-access-2jnvq" (OuterVolumeSpecName: "kube-api-access-2jnvq") pod "a01e50c7-dd24-4042-a0d9-58a62a6c946f" (UID: "a01e50c7-dd24-4042-a0d9-58a62a6c946f"). InnerVolumeSpecName "kube-api-access-2jnvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.240729 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a01e50c7-dd24-4042-a0d9-58a62a6c946f" (UID: "a01e50c7-dd24-4042-a0d9-58a62a6c946f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.244623 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-config-data" (OuterVolumeSpecName: "config-data") pod "a01e50c7-dd24-4042-a0d9-58a62a6c946f" (UID: "a01e50c7-dd24-4042-a0d9-58a62a6c946f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.313158 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jnvq\" (UniqueName: \"kubernetes.io/projected/a01e50c7-dd24-4042-a0d9-58a62a6c946f-kube-api-access-2jnvq\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.313190 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.313226 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a01e50c7-dd24-4042-a0d9-58a62a6c946f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.778477 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.778463 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a01e50c7-dd24-4042-a0d9-58a62a6c946f","Type":"ContainerDied","Data":"aca136a6f6adfb6530797a2d3b2bb8a047dab95a24798eed4dca95d03e23a5c2"} Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.778884 4869 scope.go:117] "RemoveContainer" containerID="ceb876878c7482e1d74a2c9adf2c683331ab53a5dc082b5cf08e6f3b6628c4c0" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.778482 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.808188 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.821745 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.839761 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.848435 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:24 crc kubenswrapper[4869]: E0130 11:15:24.848917 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerName="dnsmasq-dns" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.848935 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerName="dnsmasq-dns" Jan 30 11:15:24 crc kubenswrapper[4869]: E0130 11:15:24.848959 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerName="init" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.848966 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerName="init" Jan 30 11:15:24 crc kubenswrapper[4869]: E0130 11:15:24.848979 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-log" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.848985 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-log" Jan 30 11:15:24 crc kubenswrapper[4869]: E0130 11:15:24.848994 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-api" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849000 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-api" Jan 30 11:15:24 crc kubenswrapper[4869]: E0130 11:15:24.849006 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" containerName="nova-manage" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849011 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" containerName="nova-manage" Jan 30 11:15:24 crc kubenswrapper[4869]: E0130 11:15:24.849028 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a01e50c7-dd24-4042-a0d9-58a62a6c946f" containerName="nova-scheduler-scheduler" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849034 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a01e50c7-dd24-4042-a0d9-58a62a6c946f" containerName="nova-scheduler-scheduler" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849226 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-api" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849248 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c574192-b2fc-42ad-980f-ca5d42f51ac7" containerName="dnsmasq-dns" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849265 4869 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" containerName="nova-api-log" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849272 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a01e50c7-dd24-4042-a0d9-58a62a6c946f" containerName="nova-scheduler-scheduler" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.849284 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" containerName="nova-manage" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.850289 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.855349 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.855690 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.859917 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.861598 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.869856 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.879337 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.880641 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.895689 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 30 11:15:24 crc kubenswrapper[4869]: I0130 11:15:24.920677 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028197 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028283 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-config-data\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028433 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028472 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f7d516c-1685-4033-891f-64008f56a468-logs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " 
pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028499 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pp7g\" (UniqueName: \"kubernetes.io/projected/4f7d516c-1685-4033-891f-64008f56a468-kube-api-access-6pp7g\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028538 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-public-tls-certs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028686 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028766 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-config-data\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.028844 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrprx\" (UniqueName: \"kubernetes.io/projected/0f954356-d9a2-4183-9033-adf859e722e4-kube-api-access-xrprx\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130615 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130698 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-config-data\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130793 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130824 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f7d516c-1685-4033-891f-64008f56a468-logs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130848 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pp7g\" (UniqueName: 
\"kubernetes.io/projected/4f7d516c-1685-4033-891f-64008f56a468-kube-api-access-6pp7g\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130868 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-public-tls-certs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130951 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.130982 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-config-data\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.131027 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrprx\" (UniqueName: \"kubernetes.io/projected/0f954356-d9a2-4183-9033-adf859e722e4-kube-api-access-xrprx\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.131405 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f7d516c-1685-4033-891f-64008f56a468-logs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.135851 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.136071 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.136170 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.137300 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-config-data\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.137521 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-public-tls-certs\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.141308 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-config-data\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.150820 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pp7g\" (UniqueName: \"kubernetes.io/projected/4f7d516c-1685-4033-891f-64008f56a468-kube-api-access-6pp7g\") pod \"nova-api-0\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.151183 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrprx\" (UniqueName: \"kubernetes.io/projected/0f954356-d9a2-4183-9033-adf859e722e4-kube-api-access-xrprx\") pod \"nova-scheduler-0\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") " pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.197457 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.235565 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.694465 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:15:25 crc kubenswrapper[4869]: W0130 11:15:25.696794 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f7d516c_1685_4033_891f_64008f56a468.slice/crio-6940607f480a945cedc3b03567a67b4d0c2cbf60d506d036567107979dcc4200 WatchSource:0}: Error finding container 6940607f480a945cedc3b03567a67b4d0c2cbf60d506d036567107979dcc4200: Status 404 returned error can't find the container with id 6940607f480a945cedc3b03567a67b4d0c2cbf60d506d036567107979dcc4200 Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.793844 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f7d516c-1685-4033-891f-64008f56a468","Type":"ContainerStarted","Data":"6940607f480a945cedc3b03567a67b4d0c2cbf60d506d036567107979dcc4200"} Jan 30 11:15:25 crc kubenswrapper[4869]: W0130 11:15:25.799244 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f954356_d9a2_4183_9033_adf859e722e4.slice/crio-0529f67053215369e9d9b7ea07ab951ef49f1f1667e6677d142fb5ca4a0066c1 WatchSource:0}: Error finding container 0529f67053215369e9d9b7ea07ab951ef49f1f1667e6677d142fb5ca4a0066c1: Status 404 returned error can't find the container with id 0529f67053215369e9d9b7ea07ab951ef49f1f1667e6677d142fb5ca4a0066c1 Jan 30 11:15:25 crc kubenswrapper[4869]: I0130 11:15:25.801410 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.147272 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7693e7f6-379b-4d43-88be-9bdc6a5c0a2b" path="/var/lib/kubelet/pods/7693e7f6-379b-4d43-88be-9bdc6a5c0a2b/volumes" Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.148320 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a01e50c7-dd24-4042-a0d9-58a62a6c946f" path="/var/lib/kubelet/pods/a01e50c7-dd24-4042-a0d9-58a62a6c946f/volumes"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.250670 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:58444->10.217.0.193:8775: read: connection reset by peer"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.250670 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:58460->10.217.0.193:8775: read: connection reset by peer"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.719581 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.811182 4869 generic.go:334] "Generic (PLEG): container finished" podID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerID="335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588" exitCode=0
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.811259 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a9a5962-e1b8-48ff-86c2-6464d47c9077","Type":"ContainerDied","Data":"335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588"}
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.811297 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a9a5962-e1b8-48ff-86c2-6464d47c9077","Type":"ContainerDied","Data":"ce24b4b1421eaa1e0d0ada5a189b288b441e12fe2b45c10297ffc339a65da91c"}
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.811320 4869 scope.go:117] "RemoveContainer" containerID="335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.811465 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.816412 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f7d516c-1685-4033-891f-64008f56a468","Type":"ContainerStarted","Data":"dcbc7f36a647ccce1c4f9cac0b03cc3ed28f4b6d411a2de239d056af9abe8648"}
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.816456 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f7d516c-1685-4033-891f-64008f56a468","Type":"ContainerStarted","Data":"5edd9111518ad32c4899c1b6d327c4bda2bc8c9521a53b6ca409e82447732919"}
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.820296 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0f954356-d9a2-4183-9033-adf859e722e4","Type":"ContainerStarted","Data":"2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92"}
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.820337 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0f954356-d9a2-4183-9033-adf859e722e4","Type":"ContainerStarted","Data":"0529f67053215369e9d9b7ea07ab951ef49f1f1667e6677d142fb5ca4a0066c1"}
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.838979 4869 scope.go:117] "RemoveContainer" containerID="845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.860568 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.850104879 podStartE2EDuration="2.850104879s" podCreationTimestamp="2026-01-30 11:15:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:15:26.846043464 +0000 UTC m=+1277.395919530" watchObservedRunningTime="2026-01-30 11:15:26.850104879 +0000 UTC m=+1277.399980945"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.867654 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-combined-ca-bundle\") pod \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") "
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.867697 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-nova-metadata-tls-certs\") pod \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") "
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.867822 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9a5962-e1b8-48ff-86c2-6464d47c9077-logs\") pod \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") "
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.867843 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-config-data\") pod \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") "
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.867880 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz6hq\" (UniqueName: \"kubernetes.io/projected/5a9a5962-e1b8-48ff-86c2-6464d47c9077-kube-api-access-jz6hq\") pod \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\" (UID: \"5a9a5962-e1b8-48ff-86c2-6464d47c9077\") "
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.871015 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a9a5962-e1b8-48ff-86c2-6464d47c9077-logs" (OuterVolumeSpecName: "logs") pod "5a9a5962-e1b8-48ff-86c2-6464d47c9077" (UID: "5a9a5962-e1b8-48ff-86c2-6464d47c9077"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.881905 4869 scope.go:117] "RemoveContainer" containerID="335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588"
Jan 30 11:15:26 crc kubenswrapper[4869]: E0130 11:15:26.884204 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588\": container with ID starting with 335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588 not found: ID does not exist" containerID="335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.884262 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588"} err="failed to get container status \"335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588\": rpc error: code = NotFound desc = could not find container \"335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588\": container with ID starting with 335df5527f2d4ffccc0419d6660dad38be781629607e030a9dd58a70dc5f8588 not found: ID does not exist"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.884293 4869 scope.go:117] "RemoveContainer" containerID="845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b"
Jan 30 11:15:26 crc kubenswrapper[4869]: E0130 11:15:26.884590 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b\": container with ID starting with 845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b not found: ID does not exist" containerID="845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.884614 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b"} err="failed to get container status \"845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b\": rpc error: code = NotFound desc = could not find container \"845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b\": container with ID starting with 845fdb029cafa9a7661b2b2962c2e99d40dab66762f37373702c79fa3717824b not found: ID does not exist"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.888361 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.888348775 podStartE2EDuration="2.888348775s" podCreationTimestamp="2026-01-30 11:15:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:15:26.867983537 +0000 UTC m=+1277.417859613" watchObservedRunningTime="2026-01-30 11:15:26.888348775 +0000 UTC m=+1277.438224841"
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.889685 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a9a5962-e1b8-48ff-86c2-6464d47c9077-kube-api-access-jz6hq" (OuterVolumeSpecName: "kube-api-access-jz6hq") pod "5a9a5962-e1b8-48ff-86c2-6464d47c9077" (UID: "5a9a5962-e1b8-48ff-86c2-6464d47c9077"). InnerVolumeSpecName "kube-api-access-jz6hq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.898855 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-config-data" (OuterVolumeSpecName: "config-data") pod "5a9a5962-e1b8-48ff-86c2-6464d47c9077" (UID: "5a9a5962-e1b8-48ff-86c2-6464d47c9077"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.908940 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a9a5962-e1b8-48ff-86c2-6464d47c9077" (UID: "5a9a5962-e1b8-48ff-86c2-6464d47c9077"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.938171 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5a9a5962-e1b8-48ff-86c2-6464d47c9077" (UID: "5a9a5962-e1b8-48ff-86c2-6464d47c9077"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.970441 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz6hq\" (UniqueName: \"kubernetes.io/projected/5a9a5962-e1b8-48ff-86c2-6464d47c9077-kube-api-access-jz6hq\") on node \"crc\" DevicePath \"\""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.970481 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.970492 4869 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.970502 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9a5962-e1b8-48ff-86c2-6464d47c9077-logs\") on node \"crc\" DevicePath \"\""
Jan 30 11:15:26 crc kubenswrapper[4869]: I0130 11:15:26.970510 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9a5962-e1b8-48ff-86c2-6464d47c9077-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.154029 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.173093 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.193919 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:15:27 crc kubenswrapper[4869]: E0130 11:15:27.194781 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-metadata"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.194801 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-metadata"
Jan 30 11:15:27 crc kubenswrapper[4869]: E0130 11:15:27.194817 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-log"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.194824 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-log"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.195277 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-log"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.195296 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" containerName="nova-metadata-metadata"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.196620 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.199921 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.200088 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.214394 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.377884 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945c4ca-288d-4e49-9048-b66894b7e97f-logs\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.377955 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-config-data\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.377986 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.378049 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.378103 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx5k6\" (UniqueName: \"kubernetes.io/projected/d945c4ca-288d-4e49-9048-b66894b7e97f-kube-api-access-gx5k6\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.480190 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-config-data\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.480244 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.480281 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.480329 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx5k6\" (UniqueName: \"kubernetes.io/projected/d945c4ca-288d-4e49-9048-b66894b7e97f-kube-api-access-gx5k6\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.480407 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945c4ca-288d-4e49-9048-b66894b7e97f-logs\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.481204 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945c4ca-288d-4e49-9048-b66894b7e97f-logs\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.488134 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-config-data\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.488959 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.490420 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.521375 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx5k6\" (UniqueName: \"kubernetes.io/projected/d945c4ca-288d-4e49-9048-b66894b7e97f-kube-api-access-gx5k6\") pod \"nova-metadata-0\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " pod="openstack/nova-metadata-0"
Jan 30 11:15:27 crc kubenswrapper[4869]: I0130 11:15:27.530013 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 11:15:28 crc kubenswrapper[4869]: W0130 11:15:28.123995 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd945c4ca_288d_4e49_9048_b66894b7e97f.slice/crio-cae39af33b79d68b57bb61d4f9858146a971c206a9ac2deefec629e83bda6d12 WatchSource:0}: Error finding container cae39af33b79d68b57bb61d4f9858146a971c206a9ac2deefec629e83bda6d12: Status 404 returned error can't find the container with id cae39af33b79d68b57bb61d4f9858146a971c206a9ac2deefec629e83bda6d12
Jan 30 11:15:28 crc kubenswrapper[4869]: I0130 11:15:28.126533 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:15:28 crc kubenswrapper[4869]: I0130 11:15:28.156188 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a9a5962-e1b8-48ff-86c2-6464d47c9077" path="/var/lib/kubelet/pods/5a9a5962-e1b8-48ff-86c2-6464d47c9077/volumes"
Jan 30 11:15:28 crc kubenswrapper[4869]: I0130 11:15:28.841557 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d945c4ca-288d-4e49-9048-b66894b7e97f","Type":"ContainerStarted","Data":"f5b28fa841921137066788f807369cfb234084509157d818ea001da022898ab8"}
Jan 30 11:15:28 crc kubenswrapper[4869]: I0130 11:15:28.841900 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d945c4ca-288d-4e49-9048-b66894b7e97f","Type":"ContainerStarted","Data":"393a2e711fafbe8c8153de69713ef0feb5af6833a38be6f6203958c4d1bd909f"}
Jan 30 11:15:28 crc kubenswrapper[4869]: I0130 11:15:28.841915 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d945c4ca-288d-4e49-9048-b66894b7e97f","Type":"ContainerStarted","Data":"cae39af33b79d68b57bb61d4f9858146a971c206a9ac2deefec629e83bda6d12"}
Jan 30 11:15:28 crc kubenswrapper[4869]: I0130 11:15:28.867895 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.867839697 podStartE2EDuration="1.867839697s" podCreationTimestamp="2026-01-30 11:15:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 11:15:28.86266611 +0000 UTC m=+1279.412542176" watchObservedRunningTime="2026-01-30 11:15:28.867839697 +0000 UTC m=+1279.417715763"
Jan 30 11:15:30 crc kubenswrapper[4869]: I0130 11:15:30.236023 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 30 11:15:32 crc kubenswrapper[4869]: I0130 11:15:32.531497 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 30 11:15:32 crc kubenswrapper[4869]: I0130 11:15:32.532048 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 30 11:15:35 crc kubenswrapper[4869]: I0130 11:15:35.197920 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 30 11:15:35 crc kubenswrapper[4869]: I0130 11:15:35.198242 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 30 11:15:35 crc kubenswrapper[4869]: I0130 11:15:35.236654 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
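[Editor's note] The pod_startup_latency_tracker entries above are plain timestamp arithmetic: for nova-metadata-0, watchObservedRunningTime 11:15:28.867839697 minus podCreationTimestamp 11:15:27 gives the reported podStartSLOduration of 1.867839697s (likewise 2.85s for nova-api-0 earlier). A Go sketch of the computation, assuming a layout matching the timestamps as printed in the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout chosen to match the "2026-01-30 11:15:27 +0000 UTC"
	// style timestamps in the log; fractional seconds are optional.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2026-01-30 11:15:27 +0000 UTC")
	running, _ := time.Parse(layout, "2026-01-30 11:15:28.867839697 +0000 UTC")
	// podStartSLOduration = observed running time - creation timestamp
	fmt.Println(running.Sub(created).Seconds()) // 1.867839697
}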
(probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 30 11:15:35 crc kubenswrapper[4869]: I0130 11:15:35.932817 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 30 11:15:36 crc kubenswrapper[4869]: I0130 11:15:36.209935 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 11:15:36 crc kubenswrapper[4869]: I0130 11:15:36.209957 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 11:15:37 crc kubenswrapper[4869]: I0130 11:15:37.531092 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 11:15:37 crc kubenswrapper[4869]: I0130 11:15:37.531618 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 11:15:38 crc kubenswrapper[4869]: I0130 11:15:38.547947 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 11:15:38 crc kubenswrapper[4869]: I0130 11:15:38.547973 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 11:15:41 crc kubenswrapper[4869]: I0130 11:15:41.023393 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 30 11:15:45 crc kubenswrapper[4869]: I0130 11:15:45.203963 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 30 11:15:45 crc kubenswrapper[4869]: I0130 11:15:45.204791 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 30 11:15:45 crc kubenswrapper[4869]: I0130 11:15:45.205022 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 30 11:15:45 crc kubenswrapper[4869]: I0130 11:15:45.213812 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 30 11:15:46 crc kubenswrapper[4869]: I0130 11:15:46.011757 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 30 11:15:46 crc kubenswrapper[4869]: I0130 11:15:46.018415 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 30 11:15:47 crc kubenswrapper[4869]: I0130 11:15:47.536552 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 11:15:47 crc kubenswrapper[4869]: I0130 11:15:47.539979 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 
Jan 30 11:15:47 crc kubenswrapper[4869]: I0130 11:15:47.547755 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 30 11:15:48 crc kubenswrapper[4869]: I0130 11:15:48.037218 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.214755 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-7037-account-create-update-46qmh"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.230360 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-7037-account-create-update-46qmh"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.272833 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-7037-account-create-update-28qzk"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.274232 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.288539 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.316624 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-7037-account-create-update-28qzk"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.415657 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a07399e-d252-46b3-823d-3fabceb4b671-operator-scripts\") pod \"cinder-7037-account-create-update-28qzk\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") " pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.415759 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hkds\" (UniqueName: \"kubernetes.io/projected/5a07399e-d252-46b3-823d-3fabceb4b671-kube-api-access-2hkds\") pod \"cinder-7037-account-create-update-28qzk\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") " pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.431991 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-qfh9c"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.486955 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-qfh9c"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.517366 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a07399e-d252-46b3-823d-3fabceb4b671-operator-scripts\") pod \"cinder-7037-account-create-update-28qzk\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") " pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.517428 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hkds\" (UniqueName: \"kubernetes.io/projected/5a07399e-d252-46b3-823d-3fabceb4b671-kube-api-access-2hkds\") pod \"cinder-7037-account-create-update-28qzk\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") " pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.518995 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a07399e-d252-46b3-823d-3fabceb4b671-operator-scripts\") pod \"cinder-7037-account-create-update-28qzk\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") " pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.526105 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-70de-account-create-update-qkqf9"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.574608 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-70de-account-create-update-qkqf9"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.584606 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hkds\" (UniqueName: \"kubernetes.io/projected/5a07399e-d252-46b3-823d-3fabceb4b671-kube-api-access-2hkds\") pod \"cinder-7037-account-create-update-28qzk\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") " pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.627437 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.801557 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-zjpp9"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.827137 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zjpp9"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.859223 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret"
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.868796 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.883131 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-zjpp9"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.933559 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.933935 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="e7503066-4e9b-410e-b83e-04ec6c2dc05c" containerName="openstackclient" containerID="cri-o://c12e6d9a2497ce86f69e26eedbbe77a594f0dded08c3af2c1fa95dcc0378cc5e" gracePeriod=2
Jan 30 11:16:05 crc kubenswrapper[4869]: I0130 11:16:05.963985 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:05.999810 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-70de-account-create-update-9sbz8"]
Jan 30 11:16:06 crc kubenswrapper[4869]: E0130 11:16:06.000382 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7503066-4e9b-410e-b83e-04ec6c2dc05c" containerName="openstackclient"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.000401 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7503066-4e9b-410e-b83e-04ec6c2dc05c" containerName="openstackclient"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.000614 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7503066-4e9b-410e-b83e-04ec6c2dc05c" containerName="openstackclient"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.001266 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-70de-account-create-update-9sbz8"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.012238 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.034484 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-70de-account-create-update-9sbz8"]
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.069283 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts\") pod \"root-account-create-update-zjpp9\" (UID: \"f5b9f902-0038-4057-b1c2-66222926c1b5\") " pod="openstack/root-account-create-update-zjpp9"
Jan 30 11:16:06 crc kubenswrapper[4869]: E0130 11:16:06.069406 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.069435 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv5c8\" (UniqueName: \"kubernetes.io/projected/f5b9f902-0038-4057-b1c2-66222926c1b5-kube-api-access-fv5c8\") pod \"root-account-create-update-zjpp9\" (UID: \"f5b9f902-0038-4057-b1c2-66222926c1b5\") " pod="openstack/root-account-create-update-zjpp9"
Jan 30 11:16:06 crc kubenswrapper[4869]: E0130 11:16:06.069506 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data podName:4d1e4183-a136-428f-9bd8-e857a603da8f nodeName:}" failed. No retries permitted until 2026-01-30 11:16:06.569482784 +0000 UTC m=+1317.119358890 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data") pod "rabbitmq-server-0" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f") : configmap "rabbitmq-config-data" not found
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.138440 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-86a6-account-create-update-44pzz"]
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.175557 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b9027f1-3da9-4ee0-a3bd-1041accd8f3b" path="/var/lib/kubelet/pods/8b9027f1-3da9-4ee0-a3bd-1041accd8f3b/volumes"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.180090 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48318921-34ba-442b-b9f0-6f7057d5cdf5-operator-scripts\") pod \"barbican-70de-account-create-update-9sbz8\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") " pod="openstack/barbican-70de-account-create-update-9sbz8"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.180197 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts\") pod \"root-account-create-update-zjpp9\" (UID: \"f5b9f902-0038-4057-b1c2-66222926c1b5\") " pod="openstack/root-account-create-update-zjpp9"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.180227 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lbr7\" (UniqueName: \"kubernetes.io/projected/48318921-34ba-442b-b9f0-6f7057d5cdf5-kube-api-access-4lbr7\") pod \"barbican-70de-account-create-update-9sbz8\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") " pod="openstack/barbican-70de-account-create-update-9sbz8"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.180335 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv5c8\" (UniqueName: \"kubernetes.io/projected/f5b9f902-0038-4057-b1c2-66222926c1b5-kube-api-access-fv5c8\") pod \"root-account-create-update-zjpp9\" (UID: \"f5b9f902-0038-4057-b1c2-66222926c1b5\") " pod="openstack/root-account-create-update-zjpp9"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.183297 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts\") pod \"root-account-create-update-zjpp9\" (UID: \"f5b9f902-0038-4057-b1c2-66222926c1b5\") " pod="openstack/root-account-create-update-zjpp9"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.192727 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1c9c7cc-73fd-41bb-8418-126303047e6a" path="/var/lib/kubelet/pods/b1c9c7cc-73fd-41bb-8418-126303047e6a/volumes"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.193315 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b" path="/var/lib/kubelet/pods/fb7f72ff-fc2f-48ee-9fd9-ed0b2e75295b/volumes"
Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.193897 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-86a6-account-create-update-44pzz"]
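[Editor's note] rabbitmq-server-0's config-data volume cannot be set up because its configmap does not exist yet, and the nestedpendingoperations entries show the per-operation retry delay doubling: durationBeforeRetry 500ms here, 1s on the next failure below. A Go sketch of that exponential backoff; the 500ms start matches the log, while the cap is an assumed value for the sketch:

package main

import (
	"fmt"
	"time"
)

// nextDelay doubles the wait after every consecutive failure, starting
// at 500ms, matching the durationBeforeRetry progression in the log.
func nextDelay(prev time.Duration) time.Duration {
	const initial = 500 * time.Millisecond
	const maxDelay = 2 * time.Minute // assumed cap, not from the log
	if prev <= 0 {
		return initial
	}
	if d := prev * 2; d < maxDelay {
		return d
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for i := 0; i < 4; i++ {
		d = nextDelay(d)
		fmt.Println("durationBeforeRetry", d) // 500ms, 1s, 2s, 4s
	}
}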
DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.215216 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="ovn-northd" containerID="cri-o://f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" gracePeriod=30 Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.215448 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="openstack-network-exporter" containerID="cri-o://86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4" gracePeriod=30 Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.235628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv5c8\" (UniqueName: \"kubernetes.io/projected/f5b9f902-0038-4057-b1c2-66222926c1b5-kube-api-access-fv5c8\") pod \"root-account-create-update-zjpp9\" (UID: \"f5b9f902-0038-4057-b1c2-66222926c1b5\") " pod="openstack/root-account-create-update-zjpp9" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.274629 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-f8b5-account-create-update-d6m9c"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.282007 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48318921-34ba-442b-b9f0-6f7057d5cdf5-operator-scripts\") pod \"barbican-70de-account-create-update-9sbz8\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") " pod="openstack/barbican-70de-account-create-update-9sbz8" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.282227 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lbr7\" (UniqueName: \"kubernetes.io/projected/48318921-34ba-442b-b9f0-6f7057d5cdf5-kube-api-access-4lbr7\") pod \"barbican-70de-account-create-update-9sbz8\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") " pod="openstack/barbican-70de-account-create-update-9sbz8" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.284251 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48318921-34ba-442b-b9f0-6f7057d5cdf5-operator-scripts\") pod \"barbican-70de-account-create-update-9sbz8\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") " pod="openstack/barbican-70de-account-create-update-9sbz8" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.307941 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-f8b5-account-create-update-d6m9c"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.352395 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lbr7\" (UniqueName: \"kubernetes.io/projected/48318921-34ba-442b-b9f0-6f7057d5cdf5-kube-api-access-4lbr7\") pod \"barbican-70de-account-create-update-9sbz8\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") " pod="openstack/barbican-70de-account-create-update-9sbz8" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.374206 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-86a6-account-create-update-ch52c"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.375510 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.383196 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.432040 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-86a6-account-create-update-ch52c"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.474145 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-5ctps"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.499770 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9320a43-d34c-4f43-b304-8f6414a44b33-operator-scripts\") pod \"nova-api-86a6-account-create-update-ch52c\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") " pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.499904 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dmdx\" (UniqueName: \"kubernetes.io/projected/b9320a43-d34c-4f43-b304-8f6414a44b33-kube-api-access-7dmdx\") pod \"nova-api-86a6-account-create-update-ch52c\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") " pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.514593 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-5ctps"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.517037 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zjpp9" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.538929 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-8662-account-create-update-nq7mx"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.554033 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-8662-account-create-update-nq7mx"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.605842 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dmdx\" (UniqueName: \"kubernetes.io/projected/b9320a43-d34c-4f43-b304-8f6414a44b33-kube-api-access-7dmdx\") pod \"nova-api-86a6-account-create-update-ch52c\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") " pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.605972 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9320a43-d34c-4f43-b304-8f6414a44b33-operator-scripts\") pod \"nova-api-86a6-account-create-update-ch52c\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") " pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.606772 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9320a43-d34c-4f43-b304-8f6414a44b33-operator-scripts\") pod \"nova-api-86a6-account-create-update-ch52c\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") " pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: E0130 11:16:06.607073 4869 configmap.go:193] Couldn't get configMap 
openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 30 11:16:06 crc kubenswrapper[4869]: E0130 11:16:06.607113 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data podName:4d1e4183-a136-428f-9bd8-e857a603da8f nodeName:}" failed. No retries permitted until 2026-01-30 11:16:07.607100273 +0000 UTC m=+1318.156976339 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data") pod "rabbitmq-server-0" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f") : configmap "rabbitmq-config-data" not found Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.617647 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.618180 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="openstack-network-exporter" containerID="cri-o://9f060107943b0642dfd7e507c493ff833b9b292bb9f38467328dd22ddf77c864" gracePeriod=300 Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.654410 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-c83f-account-create-update-hffbp"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.657014 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-70de-account-create-update-9sbz8" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.711796 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.712571 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="openstack-network-exporter" containerID="cri-o://9a8f8895d0bd2c0f894fad76153cef03bee6e3dab153bccb600a99368ebe01e6" gracePeriod=300 Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.713528 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dmdx\" (UniqueName: \"kubernetes.io/projected/b9320a43-d34c-4f43-b304-8f6414a44b33-kube-api-access-7dmdx\") pod \"nova-api-86a6-account-create-update-ch52c\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") " pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.750800 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-c83f-account-create-update-hffbp"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.766927 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-cell1-galera-0" podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerName="galera" probeResult="failure" output="command timed out" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.800824 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8f65-account-create-update-mz5hf"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.808153 4869 util.go:30] "No sandbox for pod can be found. 
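[Editor's note] The deletions in this stretch use very different grace periods: gracePeriod=2 for openstackclient earlier, 30 for ovn-northd, 300 for the OVN database servers. The contract is the same in every case: SIGTERM first, SIGKILL only once the grace period expires. A Go sketch of that shutdown pattern applied to a local process (illustrative, not the kubelet/CRI implementation):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace mirrors "Killing container with a grace period":
// SIGTERM first, SIGKILL only if the process outlives the grace period.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		_ = cmd.Process.Kill() // grace period expired: SIGKILL
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	fmt.Println(stopWithGrace(cmd, 2*time.Second)) // cf. gracePeriod=2
}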
Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.823379 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-sj9ql"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.850291 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-ngc5k"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.872513 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8f65-account-create-update-mz5hf"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.921114 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-c00b-account-create-update-thzpf"] Jan 30 11:16:06 crc kubenswrapper[4869]: I0130 11:16:06.954693 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-mr25n"] Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.045450 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-ngc5k"] Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.150433 4869 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/glance-default-internal-api-0" secret="" err="secret \"glance-glance-dockercfg-jfgbn\" not found" Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.172901 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-sj9ql"] Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.309037 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="ovsdbserver-nb" containerID="cri-o://ceb1a3a6706024f356a33ef4db537324ff7ad4fc04da03e91359bb65670ed582" gracePeriod=300 Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.309734 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="ovsdbserver-sb" containerID="cri-o://f645ebc573f7dd5869dcdf3ccfab2bce9e8305d65b43a7373e7b2cef92aec27f" gracePeriod=300 Jan 30 11:16:07 crc kubenswrapper[4869]: W0130 11:16:07.311663 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a07399e_d252_46b3_823d_3fabceb4b671.slice/crio-0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63 WatchSource:0}: Error finding container 0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63: Status 404 returned error can't find the container with id 0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63 Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.322791 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-mr25n"] Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.340769 4869 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 30 11:16:07 crc kubenswrapper[4869]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Jan 30 11:16:07 crc kubenswrapper[4869]: Jan 30 11:16:07 crc kubenswrapper[4869]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 30 11:16:07 crc kubenswrapper[4869]: Jan 30 11:16:07 crc kubenswrapper[4869]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword 
variable."} Jan 30 11:16:07 crc kubenswrapper[4869]: Jan 30 11:16:07 crc kubenswrapper[4869]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 30 11:16:07 crc kubenswrapper[4869]: Jan 30 11:16:07 crc kubenswrapper[4869]: if [ -n "cinder" ]; then Jan 30 11:16:07 crc kubenswrapper[4869]: GRANT_DATABASE="cinder" Jan 30 11:16:07 crc kubenswrapper[4869]: else Jan 30 11:16:07 crc kubenswrapper[4869]: GRANT_DATABASE="*" Jan 30 11:16:07 crc kubenswrapper[4869]: fi Jan 30 11:16:07 crc kubenswrapper[4869]: Jan 30 11:16:07 crc kubenswrapper[4869]: # going for maximum compatibility here: Jan 30 11:16:07 crc kubenswrapper[4869]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 30 11:16:07 crc kubenswrapper[4869]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 30 11:16:07 crc kubenswrapper[4869]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 30 11:16:07 crc kubenswrapper[4869]: # support updates Jan 30 11:16:07 crc kubenswrapper[4869]: Jan 30 11:16:07 crc kubenswrapper[4869]: $MYSQL_CMD < logger="UnhandledError" Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.348241 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-7037-account-create-update-28qzk" podUID="5a07399e-d252-46b3-823d-3fabceb4b671" Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.368472 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-c00b-account-create-update-thzpf"] Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.409758 4869 secret.go:188] Couldn't get secret openstack/glance-default-internal-config-data: secret "glance-default-internal-config-data" not found Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.409840 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:07.909815703 +0000 UTC m=+1318.459691769 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-default-internal-config-data" not found Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.410908 4869 secret.go:188] Couldn't get secret openstack/glance-scripts: secret "glance-scripts" not found Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.410939 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:07.910928055 +0000 UTC m=+1318.460804111 (durationBeforeRetry 500ms). 
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.410939 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:07.910928055 +0000 UTC m=+1318.460804111 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-scripts" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.411200 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-vwj7x"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.418493 4869 generic.go:334] "Generic (PLEG): container finished" podID="493ac356-9bec-4285-850c-8e3c7739641e" containerID="86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4" exitCode=2
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.418554 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"493ac356-9bec-4285-850c-8e3c7739641e","Type":"ContainerDied","Data":"86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4"}
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.421216 4869 generic.go:334] "Generic (PLEG): container finished" podID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerID="9a8f8895d0bd2c0f894fad76153cef03bee6e3dab153bccb600a99368ebe01e6" exitCode=2
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.421518 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"286d79ce-b123-48b8-b8d1-9a1696fe00bb","Type":"ContainerDied","Data":"9a8f8895d0bd2c0f894fad76153cef03bee6e3dab153bccb600a99368ebe01e6"}
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.453006 4869 generic.go:334] "Generic (PLEG): container finished" podID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerID="9f060107943b0642dfd7e507c493ff833b9b292bb9f38467328dd22ddf77c864" exitCode=2
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.453073 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aefb9658-d09a-4e8d-9769-3d6133bd4b2c","Type":"ContainerDied","Data":"9f060107943b0642dfd7e507c493ff833b9b292bb9f38467328dd22ddf77c864"}
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.470701 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-vwj7x"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.498563 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.511768 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-pwgmc"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.579152 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-pwgmc"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.590792 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-kcn5x"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.597189 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-kcn5x"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.607050 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.607345 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="cinder-scheduler" containerID="cri-o://eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.607940 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="probe" containerID="cri-o://12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.617056 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gm6nb"]
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.621775 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.621837 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data podName:4d1e4183-a136-428f-9bd8-e857a603da8f nodeName:}" failed. No retries permitted until 2026-01-30 11:16:09.621819615 +0000 UTC m=+1320.171695681 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data") pod "rabbitmq-server-0" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f") : configmap "rabbitmq-config-data" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.622439 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.622474 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data podName:15b1a123-3831-4fa6-bc52-3f0cf30953f9 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:08.122464593 +0000 UTC m=+1318.672340659 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9") : configmap "rabbitmq-cell1-config-data" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.628901 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-jfzdq"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.643852 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-jrjbc"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.644173 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-jrjbc" podUID="333bf862-5ea9-43df-926f-5d8e463b2c80" containerName="openstack-network-exporter" containerID="cri-o://8dd5852d63fc179fcf40f8c40a3a27e1e4ca18f73ee8966299e8329c0ac8d776" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.652454 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.652789 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api-log" containerID="cri-o://f379626d704637c04266aaeddc7b56416c9104be8850dbe4859f262fe2550259" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.652933 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api" containerID="cri-o://2d8084a7cade6c321549a9768cbcd158ea761ca605ab57b0333c081b4ad26652" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.667838 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-r98zg"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.668170 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" podUID="c446ec70-c06c-4301-987c-423882ca1469" containerName="dnsmasq-dns" containerID="cri-o://ac0db009767c9d20a2e2fbda72d982d36dc79b4ff0b0a3d504db1abca9f191e3" gracePeriod=10
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.678849 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-kx76b"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.694458 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-7037-account-create-update-28qzk"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.709175 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-kx76b"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.721474 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-587f84cd84-zqhxn"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.721848 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-587f84cd84-zqhxn" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-log" containerID="cri-o://740a2738961798bcc0a9b14e30b355bacd116adc8c93775ec46f0050fa91974f" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.722374 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-587f84cd84-zqhxn" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-api" containerID="cri-o://d02ef15f5ba93cc3d7b5586f76cca1e5e8d3253af837813c4d8c7db13197b4d6" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.750408 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.751270 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-server" containerID="cri-o://e8224a2745bbe075dafb160da11e1dafa60447cd322ab3064698b3cb694f996d" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.751926 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="swift-recon-cron" containerID="cri-o://e6f93331ee688f8b3d08b68521bc6870dc9ec3fe42c2459935fb484bbb47b43b" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752010 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="rsync" containerID="cri-o://65dd5e75cb2c7b06492f5521d624b519679d57118d8f7a7a48edaca957d584a1" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752052 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-expirer" containerID="cri-o://13ba9f6f7912e14e6780c387ebc1a2663d74dda7b49e064fb9abd88ab5e57f99" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752123 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-replicator" containerID="cri-o://e88f5b9e87740566349e22c0ad0a51c60c75ada4b7cc83a0dac45fffd0379d49" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752147 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-auditor" containerID="cri-o://b439885a7cc9ae304f7f36de7bcb53459f7f5b2bd2fc41ec4f6c9ebd8922553f" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752186 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-server" containerID="cri-o://124390ed6584fb765f5eaac9acb1121748399b48696c737116ce08db4dd7bfb0" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752212 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-replicator" containerID="cri-o://6f9aa9147f317463724ec1dff3a40f0f2085d959d4963346ddc72ee9e85fd348" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752244 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-reaper" containerID="cri-o://9062f1d85cf44cdab858a6bee6da1f5afe762771c26d3b6e53a3b618a6533e24" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752277 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-server" containerID="cri-o://5ef059d18651368542240591ca6f5fe4c03b5e8aa1d605e33f266d5c65f87088" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752283 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-auditor" containerID="cri-o://59365e20746e6cb7518cf9197ed2d89e60bb59adac7d8138ec6dae90296333f2" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752316 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-updater" containerID="cri-o://20275096c286a0ae8845e55c14e269921083b740de5bce384f7ce1e846435e9b" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752340 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-replicator" containerID="cri-o://f485c0dfd027365de561b952055dc9630a3eb742e8b7b5fbc78313ec86a5772e" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752368 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-updater" containerID="cri-o://f8504fd0ff5794faf6bacd1fef665e7a9cd6fedeedff24e1b282e17c56837bea" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.752397 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-auditor" containerID="cri-o://97e9034f3bb0c1adcd5ddc73729cfb8a5551a31bba376f2f3e02026b27ed1f80" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.762008 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-5rjwn"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.773646 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-5rjwn"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.793047 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-lvfzc"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.805138 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-lvfzc"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.819608 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.839188 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-7037-account-create-update-28qzk"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.848546 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-df6cb98f-8s46w"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.848948 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-df6cb98f-8s46w" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-api" containerID="cri-o://a381b048e1d6b21ec22cf7214a1b8e39fa926bed88e85c72ebe8ab5f7bfa2c5b" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.849635 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-df6cb98f-8s46w" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-httpd" containerID="cri-o://a5570db7baf6da4c91df79cef463e37a3e6477f10e7ccd7079f3bf311e981158" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.852397 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-zsp2z"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.868273 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-zsp2z"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.880106 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.880499 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-log" containerID="cri-o://5edd9111518ad32c4899c1b6d327c4bda2bc8c9521a53b6ca409e82447732919" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.881060 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-api" containerID="cri-o://dcbc7f36a647ccce1c4f9cac0b03cc3ed28f4b6d411a2de239d056af9abe8648" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.895650 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.896014 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-log" containerID="cri-o://393a2e711fafbe8c8153de69713ef0feb5af6833a38be6f6203958c4d1bd909f" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.896631 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-metadata" containerID="cri-o://f5b28fa841921137066788f807369cfb234084509157d818ea001da022898ab8" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.922540 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.922885 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-log" containerID="cri-o://ecbb01cd1bd2c82e2943bf2f2101e86a6978d1af67f8df56a057053148d323ab" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.923595 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-httpd" containerID="cri-o://6f58056d40518f7f08d5b89fddc6140fcb4975e4b4047401e6bcc1e12b2f6a6f" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.940905 4869 secret.go:188] Couldn't get secret openstack/glance-scripts: secret "glance-scripts" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.942491 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:08.942468913 +0000 UTC m=+1319.492344979 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-scripts" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.942879 4869 secret.go:188] Couldn't get secret openstack/glance-default-internal-config-data: secret "glance-default-internal-config-data" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: E0130 11:16:07.942913 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:08.942905385 +0000 UTC m=+1319.492781451 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-default-internal-config-data" not found
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.946116 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-644f9f48bf-ccrr2"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.946414 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-644f9f48bf-ccrr2" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-httpd" containerID="cri-o://e05f057e1226a4ecd5362991160142f5969097d16d333ab689e4639d3978f4f8" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.946984 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-644f9f48bf-ccrr2" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-server" containerID="cri-o://30e76731331681ee54fc2b12405950fb45c6d15c16f3d7f16a01c29ca55daa7e" gracePeriod=30
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.968292 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-sm7m6"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.986633 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-sm7m6"]
Jan 30 11:16:07 crc kubenswrapper[4869]: I0130 11:16:07.993518 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-jqsls"]
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.020765 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-66466f9898-mzt77"]
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.046970 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-jqsls"]
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.050247 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener" containerID="cri-o://e9752fd0d18f235bdd601cdc37759bd12b8f72d28d609ba99e7c988552e2f109" gracePeriod=30
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.051837 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener-log" containerID="cri-o://c09c667d7f6ac623e362575529ea4aab1bd220f1c6756e69b8ca1ef9977354ae" gracePeriod=30
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.186063 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4" path="/var/lib/kubelet/pods/00f8d3ce-c28c-4e6d-9a22-a48bb7aeccb4/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.234602 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03699fa5-87c3-42b4-907b-586fa9d208af" path="/var/lib/kubelet/pods/03699fa5-87c3-42b4-907b-586fa9d208af/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.240464 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.240565 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data podName:15b1a123-3831-4fa6-bc52-3f0cf30953f9 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:09.240545249 +0000 UTC m=+1319.790421315 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9") : configmap "rabbitmq-cell1-config-data" not found
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.243307 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0caf28f8-d084-42f8-aa84-bb7ed8ec0fed" path="/var/lib/kubelet/pods/0caf28f8-d084-42f8-aa84-bb7ed8ec0fed/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.244589 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1293c111-bc92-47ac-aaf0-ae153a289832" path="/var/lib/kubelet/pods/1293c111-bc92-47ac-aaf0-ae153a289832/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.245375 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35a2d1cd-798d-4e72-a4fc-b0609e30f714" path="/var/lib/kubelet/pods/35a2d1cd-798d-4e72-a4fc-b0609e30f714/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.246141 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="413090e9-1b8c-43a1-9550-150f0baf022f" path="/var/lib/kubelet/pods/413090e9-1b8c-43a1-9550-150f0baf022f/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.247650 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44ca8382-9045-4817-b73f-3c885e446fab" path="/var/lib/kubelet/pods/44ca8382-9045-4817-b73f-3c885e446fab/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.250143 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="466decd8-bc59-452e-8c91-03d08f776138" path="/var/lib/kubelet/pods/466decd8-bc59-452e-8c91-03d08f776138/volumes"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.251090 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48c5632d-6e67-4014-9360-c8932146c432" path="/var/lib/kubelet/pods/48c5632d-6e67-4014-9360-c8932146c432/volumes"
podUID="5ecba8db-afd9-4f76-b5f1-61acfb49bd68" path="/var/lib/kubelet/pods/5ecba8db-afd9-4f76-b5f1-61acfb49bd68/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.252924 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61b770ca-7b59-4e86-a262-b1fb6ff4d0d0" path="/var/lib/kubelet/pods/61b770ca-7b59-4e86-a262-b1fb6ff4d0d0/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.253483 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76a2d071-f2d2-418f-be6e-0488fa1dd3d8" path="/var/lib/kubelet/pods/76a2d071-f2d2-418f-be6e-0488fa1dd3d8/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.254091 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f40e387-b913-4f1b-9055-cacff2507f9b" path="/var/lib/kubelet/pods/7f40e387-b913-4f1b-9055-cacff2507f9b/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.255161 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80814725-2ccf-4f3c-9585-8cdbeb83c2b5" path="/var/lib/kubelet/pods/80814725-2ccf-4f3c-9585-8cdbeb83c2b5/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.256267 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="848bfccf-b69f-4c76-8ae1-2dd0e7203cb8" path="/var/lib/kubelet/pods/848bfccf-b69f-4c76-8ae1-2dd0e7203cb8/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.256909 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="998d0af4-dd1e-48b0-9b87-c142eb5949f2" path="/var/lib/kubelet/pods/998d0af4-dd1e-48b0-9b87-c142eb5949f2/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.257566 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b116442d-9126-417b-a8cf-b36c70966e46" path="/var/lib/kubelet/pods/b116442d-9126-417b-a8cf-b36c70966e46/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.264078 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d55e229e-1e13-4cf5-9b86-32122704fa72" path="/var/lib/kubelet/pods/d55e229e-1e13-4cf5-9b86-32122704fa72/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.265187 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db3f3820-1e34-4ad4-bb89-b587355526a6" path="/var/lib/kubelet/pods/db3f3820-1e34-4ad4-bb89-b587355526a6/volumes" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.266947 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-65d8584cdc-bgnk8"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.266995 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.267014 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-9fb998c86-5qb5j"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.267029 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-70de-account-create-update-9sbz8"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.269152 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-9fb998c86-5qb5j" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api-log" containerID="cri-o://5c3a077c91d9559f388be79a584f5f0cc2987551bdedbe1808365a487d0f5ea3" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.269519 4869 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/barbican-worker-65d8584cdc-bgnk8" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker-log" containerID="cri-o://c1377e5a89b886398734afab4df613f251b21e463da188354d9a29304432a1e2" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.269649 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-6l5cw"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.269827 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-9fb998c86-5qb5j" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api" containerID="cri-o://92f17e6ef177f1d7c2a6e4d1a20a973d7e9064773ac8ae9ff622cf49961a940b" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.270488 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-65d8584cdc-bgnk8" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker" containerID="cri-o://21d82ca792ac006fe155b7cbdd156ead01161c135d9e86487b0d642dfa345111" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.287767 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-6l5cw"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.319655 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.320040 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-log" containerID="cri-o://c8372e95ef80d324e58700717c686cf6517c90804f250faf3d73bbe912f0a6a3" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.320286 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-httpd" containerID="cri-o://75cb85d47c4f23763e64b6970bb9222234b6d481a8bbac78888a76d4dd1f8613" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.336061 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.336370 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="f8f9cd63-d585-4053-b25b-3c0947f43755" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://80fd0e1a128b125455b0f3582efabeca4bc9e7c9682db967e54b192f2d8a8aa0" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.347755 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerName="rabbitmq" containerID="cri-o://3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a" gracePeriod=604800 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.374879 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-gclj6"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.419231 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-46cwq"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.473681 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-gclj6"] Jan 30 11:16:08 crc 
kubenswrapper[4869]: I0130 11:16:08.482771 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-46cwq"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.494767 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-86a6-account-create-update-ch52c"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.502285 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.502565 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0f954356-d9a2-4183-9033-adf859e722e4" containerName="nova-scheduler-scheduler" containerID="cri-o://2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.516341 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.516537 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="ba3b2f08-608c-49db-b58c-f20480a51bba" containerName="nova-cell0-conductor-conductor" containerID="cri-o://8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" gracePeriod=30 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.536127 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tfgnj"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.537191 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-jrjbc_333bf862-5ea9-43df-926f-5d8e463b2c80/openstack-network-exporter/0.log" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.537230 4869 generic.go:334] "Generic (PLEG): container finished" podID="333bf862-5ea9-43df-926f-5d8e463b2c80" containerID="8dd5852d63fc179fcf40f8c40a3a27e1e4ca18f73ee8966299e8329c0ac8d776" exitCode=2 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.537296 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-jrjbc" event={"ID":"333bf862-5ea9-43df-926f-5d8e463b2c80","Type":"ContainerDied","Data":"8dd5852d63fc179fcf40f8c40a3a27e1e4ca18f73ee8966299e8329c0ac8d776"} Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.564365 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tfgnj"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.566381 4869 generic.go:334] "Generic (PLEG): container finished" podID="e7503066-4e9b-410e-b83e-04ec6c2dc05c" containerID="c12e6d9a2497ce86f69e26eedbbe77a594f0dded08c3af2c1fa95dcc0378cc5e" exitCode=137 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.571274 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s8qnc"] Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.587264 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_aefb9658-d09a-4e8d-9769-3d6133bd4b2c/ovsdbserver-sb/0.log" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.587319 4869 generic.go:334] "Generic (PLEG): container finished" podID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerID="f645ebc573f7dd5869dcdf3ccfab2bce9e8305d65b43a7373e7b2cef92aec27f" exitCode=143 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.587430 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"aefb9658-d09a-4e8d-9769-3d6133bd4b2c","Type":"ContainerDied","Data":"f645ebc573f7dd5869dcdf3ccfab2bce9e8305d65b43a7373e7b2cef92aec27f"} Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.601011 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_286d79ce-b123-48b8-b8d1-9a1696fe00bb/ovsdbserver-nb/0.log" Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.601057 4869 generic.go:334] "Generic (PLEG): container finished" podID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerID="ceb1a3a6706024f356a33ef4db537324ff7ad4fc04da03e91359bb65670ed582" exitCode=143 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.601110 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"286d79ce-b123-48b8-b8d1-9a1696fe00bb","Type":"ContainerDied","Data":"ceb1a3a6706024f356a33ef4db537324ff7ad4fc04da03e91359bb65670ed582"} Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.616035 4869 generic.go:334] "Generic (PLEG): container finished" podID="4f7d516c-1685-4033-891f-64008f56a468" containerID="5edd9111518ad32c4899c1b6d327c4bda2bc8c9521a53b6ca409e82447732919" exitCode=143 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.616597 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f7d516c-1685-4033-891f-64008f56a468","Type":"ContainerDied","Data":"5edd9111518ad32c4899c1b6d327c4bda2bc8c9521a53b6ca409e82447732919"} Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630600 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="65dd5e75cb2c7b06492f5521d624b519679d57118d8f7a7a48edaca957d584a1" exitCode=0 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630636 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="13ba9f6f7912e14e6780c387ebc1a2663d74dda7b49e064fb9abd88ab5e57f99" exitCode=0 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630644 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="f8504fd0ff5794faf6bacd1fef665e7a9cd6fedeedff24e1b282e17c56837bea" exitCode=0 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630653 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="97e9034f3bb0c1adcd5ddc73729cfb8a5551a31bba376f2f3e02026b27ed1f80" exitCode=0 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630660 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="20275096c286a0ae8845e55c14e269921083b740de5bce384f7ce1e846435e9b" exitCode=0 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630667 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="b439885a7cc9ae304f7f36de7bcb53459f7f5b2bd2fc41ec4f6c9ebd8922553f" exitCode=0 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630675 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="e88f5b9e87740566349e22c0ad0a51c60c75ada4b7cc83a0dac45fffd0379d49" exitCode=0 Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630684 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="124390ed6584fb765f5eaac9acb1121748399b48696c737116ce08db4dd7bfb0" exitCode=0 Jan 30 11:16:08 crc 
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630691 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="9062f1d85cf44cdab858a6bee6da1f5afe762771c26d3b6e53a3b618a6533e24" exitCode=0
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630698 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="59365e20746e6cb7518cf9197ed2d89e60bb59adac7d8138ec6dae90296333f2" exitCode=0
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630720 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="f485c0dfd027365de561b952055dc9630a3eb742e8b7b5fbc78313ec86a5772e" exitCode=0
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630830 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"65dd5e75cb2c7b06492f5521d624b519679d57118d8f7a7a48edaca957d584a1"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630862 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"13ba9f6f7912e14e6780c387ebc1a2663d74dda7b49e064fb9abd88ab5e57f99"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630872 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"f8504fd0ff5794faf6bacd1fef665e7a9cd6fedeedff24e1b282e17c56837bea"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630880 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"97e9034f3bb0c1adcd5ddc73729cfb8a5551a31bba376f2f3e02026b27ed1f80"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630888 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"20275096c286a0ae8845e55c14e269921083b740de5bce384f7ce1e846435e9b"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630896 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"b439885a7cc9ae304f7f36de7bcb53459f7f5b2bd2fc41ec4f6c9ebd8922553f"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630905 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"e88f5b9e87740566349e22c0ad0a51c60c75ada4b7cc83a0dac45fffd0379d49"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630914 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"124390ed6584fb765f5eaac9acb1121748399b48696c737116ce08db4dd7bfb0"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630921 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"9062f1d85cf44cdab858a6bee6da1f5afe762771c26d3b6e53a3b618a6533e24"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630932 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"59365e20746e6cb7518cf9197ed2d89e60bb59adac7d8138ec6dae90296333f2"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.630940 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"f485c0dfd027365de561b952055dc9630a3eb742e8b7b5fbc78313ec86a5772e"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.633784 4869 generic.go:334] "Generic (PLEG): container finished" podID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerID="c09c667d7f6ac623e362575529ea4aab1bd220f1c6756e69b8ca1ef9977354ae" exitCode=143
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.633827 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" event={"ID":"45d8f6aa-887f-444b-81c8-7bf6c03993c9","Type":"ContainerDied","Data":"c09c667d7f6ac623e362575529ea4aab1bd220f1c6756e69b8ca1ef9977354ae"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.635280 4869 generic.go:334] "Generic (PLEG): container finished" podID="104ca851-1c21-41bd-8a92-423fdab83753" containerID="c1377e5a89b886398734afab4df613f251b21e463da188354d9a29304432a1e2" exitCode=143
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.635323 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65d8584cdc-bgnk8" event={"ID":"104ca851-1c21-41bd-8a92-423fdab83753","Type":"ContainerDied","Data":"c1377e5a89b886398734afab4df613f251b21e463da188354d9a29304432a1e2"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.637228 4869 generic.go:334] "Generic (PLEG): container finished" podID="c446ec70-c06c-4301-987c-423882ca1469" containerID="ac0db009767c9d20a2e2fbda72d982d36dc79b4ff0b0a3d504db1abca9f191e3" exitCode=0
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.637265 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" event={"ID":"c446ec70-c06c-4301-987c-423882ca1469","Type":"ContainerDied","Data":"ac0db009767c9d20a2e2fbda72d982d36dc79b4ff0b0a3d504db1abca9f191e3"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.652781 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s8qnc"]
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.692081 4869 generic.go:334] "Generic (PLEG): container finished" podID="323f79a2-48c7-4768-8707-23bc31755a50" containerID="740a2738961798bcc0a9b14e30b355bacd116adc8c93775ec46f0050fa91974f" exitCode=143
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.692183 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-587f84cd84-zqhxn" event={"ID":"323f79a2-48c7-4768-8707-23bc31755a50","Type":"ContainerDied","Data":"740a2738961798bcc0a9b14e30b355bacd116adc8c93775ec46f0050fa91974f"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.692286 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" containerID="cri-o://9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" gracePeriod=29
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.692357 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerName="galera" containerID="cri-o://ab6fefef94e67c7669176c91b03cf31715872e1eba1e24a159531ceb1c264993" gracePeriod=30
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.697858 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.698103 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" containerName="nova-cell1-conductor-conductor" containerID="cri-o://f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" gracePeriod=30
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.703914 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.708930 4869 generic.go:334] "Generic (PLEG): container finished" podID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerID="a5570db7baf6da4c91df79cef463e37a3e6477f10e7ccd7079f3bf311e981158" exitCode=0
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.709022 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df6cb98f-8s46w" event={"ID":"484ba6c3-20dc-4b27-b7f5-901eef0643a7","Type":"ContainerDied","Data":"a5570db7baf6da4c91df79cef463e37a3e6477f10e7ccd7079f3bf311e981158"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.711006 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-zjpp9"]
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.711090 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d945c4ca-288d-4e49-9048-b66894b7e97f","Type":"ContainerDied","Data":"393a2e711fafbe8c8153de69713ef0feb5af6833a38be6f6203958c4d1bd909f"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.711021 4869 generic.go:334] "Generic (PLEG): container finished" podID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerID="393a2e711fafbe8c8153de69713ef0feb5af6833a38be6f6203958c4d1bd909f" exitCode=143
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.739427 4869 generic.go:334] "Generic (PLEG): container finished" podID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerID="f379626d704637c04266aaeddc7b56416c9104be8850dbe4859f262fe2550259" exitCode=143
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.739548 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65d95395-5aea-4546-b12a-ec8ce58ec704","Type":"ContainerDied","Data":"f379626d704637c04266aaeddc7b56416c9104be8850dbe4859f262fe2550259"}
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.748717 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7037-account-create-update-28qzk" event={"ID":"5a07399e-d252-46b3-823d-3fabceb4b671","Type":"ContainerStarted","Data":"0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63"}
Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.782088 4869 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 30 11:16:08 crc kubenswrapper[4869]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash
Jan 30 11:16:08 crc kubenswrapper[4869]:
Jan 30 11:16:08 crc kubenswrapper[4869]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 30 11:16:08 crc kubenswrapper[4869]:
Jan 30 11:16:08 crc kubenswrapper[4869]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 30 11:16:08 crc kubenswrapper[4869]:
Jan 30 11:16:08 crc kubenswrapper[4869]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 30 11:16:08 crc kubenswrapper[4869]:
Jan 30 11:16:08 crc kubenswrapper[4869]: if [ -n "cinder" ]; then
Jan 30 11:16:08 crc kubenswrapper[4869]: GRANT_DATABASE="cinder"
Jan 30 11:16:08 crc kubenswrapper[4869]: else
Jan 30 11:16:08 crc kubenswrapper[4869]: GRANT_DATABASE="*"
Jan 30 11:16:08 crc kubenswrapper[4869]: fi
Jan 30 11:16:08 crc kubenswrapper[4869]:
Jan 30 11:16:08 crc kubenswrapper[4869]: # going for maximum compatibility here:
Jan 30 11:16:08 crc kubenswrapper[4869]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 30 11:16:08 crc kubenswrapper[4869]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 30 11:16:08 crc kubenswrapper[4869]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 30 11:16:08 crc kubenswrapper[4869]: # support updates
Jan 30 11:16:08 crc kubenswrapper[4869]:
Jan 30 11:16:08 crc kubenswrapper[4869]: $MYSQL_CMD < logger="UnhandledError"
Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.787073 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-7037-account-create-update-28qzk" podUID="5a07399e-d252-46b3-823d-3fabceb4b671"
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.816269 4869 generic.go:334] "Generic (PLEG): container finished" podID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerID="ecbb01cd1bd2c82e2943bf2f2101e86a6978d1af67f8df56a057053148d323ab" exitCode=143
Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.816621 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec34c29c-665f-465a-99d0-c342aca2cf14","Type":"ContainerDied","Data":"ecbb01cd1bd2c82e2943bf2f2101e86a6978d1af67f8df56a057053148d323ab"}
Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.960822 4869 secret.go:188] Couldn't get secret openstack/glance-scripts: secret "glance-scripts" not found
Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.962533 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:10.962506895 +0000 UTC m=+1321.512382961 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-scripts" not found
Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.960861 4869 secret.go:188] Couldn't get secret openstack/glance-default-internal-config-data: secret "glance-default-internal-config-data" not found
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-default-internal-config-data" not found Jan 30 11:16:08 crc kubenswrapper[4869]: I0130 11:16:08.995847 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerName="rabbitmq" containerID="cri-o://0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e" gracePeriod=604800 Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.999164 4869 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 30 11:16:08 crc kubenswrapper[4869]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 30 11:16:08 crc kubenswrapper[4869]: + source /usr/local/bin/container-scripts/functions Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNBridge=br-int Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNRemote=tcp:localhost:6642 Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNEncapType=geneve Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNAvailabilityZones= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ EnableChassisAsGateway=true Jan 30 11:16:08 crc kubenswrapper[4869]: ++ PhysicalNetworks= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNHostName= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 30 11:16:08 crc kubenswrapper[4869]: ++ ovs_dir=/var/lib/openvswitch Jan 30 11:16:08 crc kubenswrapper[4869]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 30 11:16:08 crc kubenswrapper[4869]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 30 11:16:08 crc kubenswrapper[4869]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + sleep 0.5 Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + sleep 0.5 Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + cleanup_ovsdb_server_semaphore Jan 30 11:16:08 crc kubenswrapper[4869]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 30 11:16:08 crc kubenswrapper[4869]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 30 11:16:08 crc kubenswrapper[4869]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-jfzdq" message=< Jan 30 11:16:08 crc kubenswrapper[4869]: Exiting ovsdb-server (5) ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 30 11:16:08 crc kubenswrapper[4869]: + source /usr/local/bin/container-scripts/functions Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNBridge=br-int Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNRemote=tcp:localhost:6642 Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNEncapType=geneve Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNAvailabilityZones= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ EnableChassisAsGateway=true Jan 30 11:16:08 crc kubenswrapper[4869]: ++ PhysicalNetworks= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNHostName= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 30 11:16:08 crc kubenswrapper[4869]: ++ ovs_dir=/var/lib/openvswitch Jan 30 11:16:08 crc kubenswrapper[4869]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 30 11:16:08 crc kubenswrapper[4869]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 30 11:16:08 crc kubenswrapper[4869]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + sleep 0.5 Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + sleep 0.5 Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + cleanup_ovsdb_server_semaphore Jan 30 11:16:08 crc kubenswrapper[4869]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 30 11:16:08 crc kubenswrapper[4869]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 30 11:16:08 crc kubenswrapper[4869]: > Jan 30 11:16:08 crc kubenswrapper[4869]: E0130 11:16:08.999238 4869 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 30 11:16:08 crc kubenswrapper[4869]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 30 11:16:08 crc kubenswrapper[4869]: + source /usr/local/bin/container-scripts/functions Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNBridge=br-int Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNRemote=tcp:localhost:6642 Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNEncapType=geneve Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNAvailabilityZones= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ EnableChassisAsGateway=true Jan 30 11:16:08 crc kubenswrapper[4869]: ++ PhysicalNetworks= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ OVNHostName= Jan 30 11:16:08 crc kubenswrapper[4869]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 30 11:16:08 crc kubenswrapper[4869]: ++ ovs_dir=/var/lib/openvswitch Jan 30 11:16:08 crc kubenswrapper[4869]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 30 11:16:08 crc kubenswrapper[4869]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 30 11:16:08 crc kubenswrapper[4869]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + sleep 0.5 Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + sleep 0.5 Jan 30 11:16:08 crc kubenswrapper[4869]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 30 11:16:08 crc kubenswrapper[4869]: + cleanup_ovsdb_server_semaphore Jan 30 11:16:08 crc kubenswrapper[4869]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 30 11:16:08 crc kubenswrapper[4869]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 30 11:16:08 crc kubenswrapper[4869]: > pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" containerID="cri-o://805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:08.999401 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" containerID="cri-o://805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" gracePeriod=29 Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.000979 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-jrjbc_333bf862-5ea9-43df-926f-5d8e463b2c80/openstack-network-exporter/0.log" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.001066 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-jrjbc" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.033159 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_286d79ce-b123-48b8-b8d1-9a1696fe00bb/ovsdbserver-nb/0.log" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.033256 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.037206 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_aefb9658-d09a-4e8d-9769-3d6133bd4b2c/ovsdbserver-sb/0.log" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.037300 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.111011 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-70de-account-create-update-9sbz8"] Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.146766 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-86a6-account-create-update-ch52c"] Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.174965 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-scripts\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-config\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175100 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-config\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68kzn\" (UniqueName: \"kubernetes.io/projected/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-kube-api-access-68kzn\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175217 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovs-rundir\") pod \"333bf862-5ea9-43df-926f-5d8e463b2c80\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175257 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdbserver-sb-tls-certs\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175279 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdbserver-nb-tls-certs\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175318 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175344 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/333bf862-5ea9-43df-926f-5d8e463b2c80-config\") pod \"333bf862-5ea9-43df-926f-5d8e463b2c80\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175365 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lcvl\" (UniqueName: \"kubernetes.io/projected/333bf862-5ea9-43df-926f-5d8e463b2c80-kube-api-access-8lcvl\") pod \"333bf862-5ea9-43df-926f-5d8e463b2c80\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175420 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-combined-ca-bundle\") pod \"333bf862-5ea9-43df-926f-5d8e463b2c80\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175439 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovn-rundir\") pod \"333bf862-5ea9-43df-926f-5d8e463b2c80\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175476 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqbrb\" (UniqueName: \"kubernetes.io/projected/286d79ce-b123-48b8-b8d1-9a1696fe00bb-kube-api-access-lqbrb\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175551 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdb-rundir\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175572 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-metrics-certs-tls-certs\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175589 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-scripts\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175629 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-metrics-certs-tls-certs\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175665 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-combined-ca-bundle\") pod \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\" (UID: \"aefb9658-d09a-4e8d-9769-3d6133bd4b2c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175689 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175754 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdb-rundir\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175838 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-metrics-certs-tls-certs\") pod \"333bf862-5ea9-43df-926f-5d8e463b2c80\" (UID: \"333bf862-5ea9-43df-926f-5d8e463b2c80\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.175855 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-combined-ca-bundle\") pod \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\" (UID: \"286d79ce-b123-48b8-b8d1-9a1696fe00bb\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.176066 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-scripts" (OuterVolumeSpecName: "scripts") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.176795 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.176841 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "333bf862-5ea9-43df-926f-5d8e463b2c80" (UID: "333bf862-5ea9-43df-926f-5d8e463b2c80"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.177634 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "333bf862-5ea9-43df-926f-5d8e463b2c80" (UID: "333bf862-5ea9-43df-926f-5d8e463b2c80"). InnerVolumeSpecName "ovs-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.177995 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-config" (OuterVolumeSpecName: "config") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.178020 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-config" (OuterVolumeSpecName: "config") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.178198 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.182700 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.183848 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-scripts" (OuterVolumeSpecName: "scripts") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.184550 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/333bf862-5ea9-43df-926f-5d8e463b2c80-config" (OuterVolumeSpecName: "config") pod "333bf862-5ea9-43df-926f-5d8e463b2c80" (UID: "333bf862-5ea9-43df-926f-5d8e463b2c80"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.189657 4869 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 30 11:16:09 crc kubenswrapper[4869]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: if [ -n "barbican" ]; then Jan 30 11:16:09 crc kubenswrapper[4869]: GRANT_DATABASE="barbican" Jan 30 11:16:09 crc kubenswrapper[4869]: else Jan 30 11:16:09 crc kubenswrapper[4869]: GRANT_DATABASE="*" Jan 30 11:16:09 crc kubenswrapper[4869]: fi Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: # going for maximum compatibility here: Jan 30 11:16:09 crc kubenswrapper[4869]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 30 11:16:09 crc kubenswrapper[4869]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 30 11:16:09 crc kubenswrapper[4869]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 30 11:16:09 crc kubenswrapper[4869]: # support updates Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: $MYSQL_CMD < logger="UnhandledError" Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.190152 4869 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 30 11:16:09 crc kubenswrapper[4869]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: if [ -n "nova_api" ]; then Jan 30 11:16:09 crc kubenswrapper[4869]: GRANT_DATABASE="nova_api" Jan 30 11:16:09 crc kubenswrapper[4869]: else Jan 30 11:16:09 crc kubenswrapper[4869]: GRANT_DATABASE="*" Jan 30 11:16:09 crc kubenswrapper[4869]: fi Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: # going for maximum compatibility here: Jan 30 11:16:09 crc kubenswrapper[4869]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 30 11:16:09 crc kubenswrapper[4869]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 30 11:16:09 crc kubenswrapper[4869]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 30 11:16:09 crc kubenswrapper[4869]: # support updates Jan 30 11:16:09 crc kubenswrapper[4869]: Jan 30 11:16:09 crc kubenswrapper[4869]: $MYSQL_CMD < logger="UnhandledError" Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.191308 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-70de-account-create-update-9sbz8" podUID="48318921-34ba-442b-b9f0-6f7057d5cdf5" Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.191359 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-86a6-account-create-update-ch52c" podUID="b9320a43-d34c-4f43-b304-8f6414a44b33" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.199996 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.200382 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-kube-api-access-68kzn" (OuterVolumeSpecName: "kube-api-access-68kzn") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "kube-api-access-68kzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.202895 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.207943 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/286d79ce-b123-48b8-b8d1-9a1696fe00bb-kube-api-access-lqbrb" (OuterVolumeSpecName: "kube-api-access-lqbrb") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "kube-api-access-lqbrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.211305 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/333bf862-5ea9-43df-926f-5d8e463b2c80-kube-api-access-8lcvl" (OuterVolumeSpecName: "kube-api-access-8lcvl") pod "333bf862-5ea9-43df-926f-5d8e463b2c80" (UID: "333bf862-5ea9-43df-926f-5d8e463b2c80"). InnerVolumeSpecName "kube-api-access-8lcvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.227831 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.275125 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282235 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282264 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282284 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282294 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282304 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/286d79ce-b123-48b8-b8d1-9a1696fe00bb-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282312 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282320 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68kzn\" (UniqueName: \"kubernetes.io/projected/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-kube-api-access-68kzn\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282330 4869 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovs-rundir\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282343 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282351 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/333bf862-5ea9-43df-926f-5d8e463b2c80-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282361 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lcvl\" (UniqueName: \"kubernetes.io/projected/333bf862-5ea9-43df-926f-5d8e463b2c80-kube-api-access-8lcvl\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282370 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/333bf862-5ea9-43df-926f-5d8e463b2c80-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.282377 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqbrb\" (UniqueName: 
\"kubernetes.io/projected/286d79ce-b123-48b8-b8d1-9a1696fe00bb-kube-api-access-lqbrb\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.286427 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.286505 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data podName:15b1a123-3831-4fa6-bc52-3f0cf30953f9 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:11.286484467 +0000 UTC m=+1321.836360573 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9") : configmap "rabbitmq-cell1-config-data" not found Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.288884 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.297641 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "333bf862-5ea9-43df-926f-5d8e463b2c80" (UID: "333bf862-5ea9-43df-926f-5d8e463b2c80"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.335977 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.345346 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383336 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config\") pod \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383406 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-config\") pod \"c446ec70-c06c-4301-987c-423882ca1469\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383490 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-svc\") pod \"c446ec70-c06c-4301-987c-423882ca1469\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383530 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29lc9\" (UniqueName: \"kubernetes.io/projected/c446ec70-c06c-4301-987c-423882ca1469-kube-api-access-29lc9\") pod \"c446ec70-c06c-4301-987c-423882ca1469\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383611 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-combined-ca-bundle\") pod \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383647 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config-secret\") pod \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383662 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-nb\") pod \"c446ec70-c06c-4301-987c-423882ca1469\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383730 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-swift-storage-0\") pod \"c446ec70-c06c-4301-987c-423882ca1469\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.383766 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2nh6\" (UniqueName: \"kubernetes.io/projected/e7503066-4e9b-410e-b83e-04ec6c2dc05c-kube-api-access-m2nh6\") pod \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\" (UID: \"e7503066-4e9b-410e-b83e-04ec6c2dc05c\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 
11:16:09.383800 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-sb\") pod \"c446ec70-c06c-4301-987c-423882ca1469\" (UID: \"c446ec70-c06c-4301-987c-423882ca1469\") " Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.384276 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.384295 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.384307 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.384316 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.434551 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7503066-4e9b-410e-b83e-04ec6c2dc05c-kube-api-access-m2nh6" (OuterVolumeSpecName: "kube-api-access-m2nh6") pod "e7503066-4e9b-410e-b83e-04ec6c2dc05c" (UID: "e7503066-4e9b-410e-b83e-04ec6c2dc05c"). InnerVolumeSpecName "kube-api-access-m2nh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.441121 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c446ec70-c06c-4301-987c-423882ca1469-kube-api-access-29lc9" (OuterVolumeSpecName: "kube-api-access-29lc9") pod "c446ec70-c06c-4301-987c-423882ca1469" (UID: "c446ec70-c06c-4301-987c-423882ca1469"). InnerVolumeSpecName "kube-api-access-29lc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.465629 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.472646 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "333bf862-5ea9-43df-926f-5d8e463b2c80" (UID: "333bf862-5ea9-43df-926f-5d8e463b2c80"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.499053 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.501015 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.509375 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.509915 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" containerName="nova-cell1-conductor-conductor" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.516648 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.516673 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29lc9\" (UniqueName: \"kubernetes.io/projected/c446ec70-c06c-4301-987c-423882ca1469-kube-api-access-29lc9\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.516686 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/333bf862-5ea9-43df-926f-5d8e463b2c80-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.516727 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2nh6\" (UniqueName: \"kubernetes.io/projected/e7503066-4e9b-410e-b83e-04ec6c2dc05c-kube-api-access-m2nh6\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.522109 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7503066-4e9b-410e-b83e-04ec6c2dc05c" (UID: "e7503066-4e9b-410e-b83e-04ec6c2dc05c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.639155 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.639347 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 30 11:16:09 crc kubenswrapper[4869]: E0130 11:16:09.639568 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data podName:4d1e4183-a136-428f-9bd8-e857a603da8f nodeName:}" failed. No retries permitted until 2026-01-30 11:16:13.639547846 +0000 UTC m=+1324.189423912 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data") pod "rabbitmq-server-0" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f") : configmap "rabbitmq-config-data" not found Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.692611 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e7503066-4e9b-410e-b83e-04ec6c2dc05c" (UID: "e7503066-4e9b-410e-b83e-04ec6c2dc05c"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.696966 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c446ec70-c06c-4301-987c-423882ca1469" (UID: "c446ec70-c06c-4301-987c-423882ca1469"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.717108 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.718646 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e7503066-4e9b-410e-b83e-04ec6c2dc05c" (UID: "e7503066-4e9b-410e-b83e-04ec6c2dc05c"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.746153 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.755839 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.755874 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.755883 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.755893 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e7503066-4e9b-410e-b83e-04ec6c2dc05c-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.755903 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.758865 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c446ec70-c06c-4301-987c-423882ca1469" (UID: "c446ec70-c06c-4301-987c-423882ca1469"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.758887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "286d79ce-b123-48b8-b8d1-9a1696fe00bb" (UID: "286d79ce-b123-48b8-b8d1-9a1696fe00bb"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.796313 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c446ec70-c06c-4301-987c-423882ca1469" (UID: "c446ec70-c06c-4301-987c-423882ca1469"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.796441 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "aefb9658-d09a-4e8d-9769-3d6133bd4b2c" (UID: "aefb9658-d09a-4e8d-9769-3d6133bd4b2c"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.813227 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c446ec70-c06c-4301-987c-423882ca1469" (UID: "c446ec70-c06c-4301-987c-423882ca1469"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.857919 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.857964 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/286d79ce-b123-48b8-b8d1-9a1696fe00bb-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.858014 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aefb9658-d09a-4e8d-9769-3d6133bd4b2c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.858027 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.858036 4869 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.898180 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="6f9aa9147f317463724ec1dff3a40f0f2085d959d4963346ddc72ee9e85fd348" exitCode=0 Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.898214 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="5ef059d18651368542240591ca6f5fe4c03b5e8aa1d605e33f266d5c65f87088" exitCode=0 Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.898224 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="e8224a2745bbe075dafb160da11e1dafa60447cd322ab3064698b3cb694f996d" exitCode=0 Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.898295 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"6f9aa9147f317463724ec1dff3a40f0f2085d959d4963346ddc72ee9e85fd348"} Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.898327 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"5ef059d18651368542240591ca6f5fe4c03b5e8aa1d605e33f266d5c65f87088"} Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.898339 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"e8224a2745bbe075dafb160da11e1dafa60447cd322ab3064698b3cb694f996d"} Jan 30 
11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.907486 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-config" (OuterVolumeSpecName: "config") pod "c446ec70-c06c-4301-987c-423882ca1469" (UID: "c446ec70-c06c-4301-987c-423882ca1469"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.909577 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_286d79ce-b123-48b8-b8d1-9a1696fe00bb/ovsdbserver-nb/0.log"
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.909695 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"286d79ce-b123-48b8-b8d1-9a1696fe00bb","Type":"ContainerDied","Data":"d10c9c52d9b87e2ebc1d2082b80b0003e504e9b90cd526415350d7f02616abed"}
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.909764 4869 scope.go:117] "RemoveContainer" containerID="9a8f8895d0bd2c0f894fad76153cef03bee6e3dab153bccb600a99368ebe01e6"
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.909928 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.925824 4869 generic.go:334] "Generic (PLEG): container finished" podID="e4264086-12ed-4655-9657-14083653d56d" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" exitCode=0
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.925937 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerDied","Data":"805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c"}
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.928292 4869 generic.go:334] "Generic (PLEG): container finished" podID="f5b9f902-0038-4057-b1c2-66222926c1b5" containerID="04cb5cbbd47e7e666f5e169d92064f3b480d2b19c91ebb32ff908f9eaa6eacd0" exitCode=1
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.928360 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zjpp9" event={"ID":"f5b9f902-0038-4057-b1c2-66222926c1b5","Type":"ContainerDied","Data":"04cb5cbbd47e7e666f5e169d92064f3b480d2b19c91ebb32ff908f9eaa6eacd0"}
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.928378 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zjpp9" event={"ID":"f5b9f902-0038-4057-b1c2-66222926c1b5","Type":"ContainerStarted","Data":"2aa4014e3720acc1e48b4d2149c02f5fb50fbaf7ba54b5987c76d2a7e243a2e7"}
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.929732 4869 scope.go:117] "RemoveContainer" containerID="04cb5cbbd47e7e666f5e169d92064f3b480d2b19c91ebb32ff908f9eaa6eacd0"
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.955931 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 30 11:16:09 crc kubenswrapper[4869]: I0130 11:16:09.966385 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c446ec70-c06c-4301-987c-423882ca1469-config\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.000971 4869 generic.go:334] "Generic (PLEG): container finished" podID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerID="e9752fd0d18f235bdd601cdc37759bd12b8f72d28d609ba99e7c988552e2f109" exitCode=0
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.001082 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" event={"ID":"45d8f6aa-887f-444b-81c8-7bf6c03993c9","Type":"ContainerDied","Data":"e9752fd0d18f235bdd601cdc37759bd12b8f72d28d609ba99e7c988552e2f109"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.003661 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-70de-account-create-update-9sbz8" event={"ID":"48318921-34ba-442b-b9f0-6f7057d5cdf5","Type":"ContainerStarted","Data":"45a82d988463a6192c4a79e192c1a6d2dd6d594b4393d574c865a9b1c86858c8"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.019578 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.020275 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-r98zg" event={"ID":"c446ec70-c06c-4301-987c-423882ca1469","Type":"ContainerDied","Data":"0e92cac8745e164845a2872a8982b9ab3d928d8d308092cb8e265a5afd40fc6c"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.033386 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-66466f9898-mzt77"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.034139 4869 generic.go:334] "Generic (PLEG): container finished" podID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerID="ab6fefef94e67c7669176c91b03cf31715872e1eba1e24a159531ceb1c264993" exitCode=0
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.034215 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4618ceff-14a9-4866-aa22-e29767d8d7e4","Type":"ContainerDied","Data":"ab6fefef94e67c7669176c91b03cf31715872e1eba1e24a159531ceb1c264993"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.049432 4869 scope.go:117] "RemoveContainer" containerID="ceb1a3a6706024f356a33ef4db537324ff7ad4fc04da03e91359bb65670ed582"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.049472 4869 generic.go:334] "Generic (PLEG): container finished" podID="161960a2-9537-4f72-913b-54b23f2b4be7" containerID="12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008" exitCode=0
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.049628 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"161960a2-9537-4f72-913b-54b23f2b4be7","Type":"ContainerDied","Data":"12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.091805 4869 generic.go:334] "Generic (PLEG): container finished" podID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerID="c8372e95ef80d324e58700717c686cf6517c90804f250faf3d73bbe912f0a6a3" exitCode=143
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.092012 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ae8a334-b758-420e-8aae-a3f6437f9816","Type":"ContainerDied","Data":"c8372e95ef80d324e58700717c686cf6517c90804f250faf3d73bbe912f0a6a3"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.107328 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-86a6-account-create-update-ch52c" event={"ID":"b9320a43-d34c-4f43-b304-8f6414a44b33","Type":"ContainerStarted","Data":"b1378dcb5d8617ca2d069b38cc24a3920e54b6791f959f0a7314ff01c2aed810"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.110774 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.132161 4869 generic.go:334] "Generic (PLEG): container finished" podID="104ca851-1c21-41bd-8a92-423fdab83753" containerID="21d82ca792ac006fe155b7cbdd156ead01161c135d9e86487b0d642dfa345111" exitCode=0
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.132289 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65d8584cdc-bgnk8" event={"ID":"104ca851-1c21-41bd-8a92-423fdab83753","Type":"ContainerDied","Data":"21d82ca792ac006fe155b7cbdd156ead01161c135d9e86487b0d642dfa345111"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.147628 4869 generic.go:334] "Generic (PLEG): container finished" podID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerID="30e76731331681ee54fc2b12405950fb45c6d15c16f3d7f16a01c29ca55daa7e" exitCode=0
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.147665 4869 generic.go:334] "Generic (PLEG): container finished" podID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerID="e05f057e1226a4ecd5362991160142f5969097d16d333ab689e4639d3978f4f8" exitCode=0
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.161331 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-jrjbc_333bf862-5ea9-43df-926f-5d8e463b2c80/openstack-network-exporter/0.log"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.161560 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-jrjbc"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.169222 4869 generic.go:334] "Generic (PLEG): container finished" podID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerID="5c3a077c91d9559f388be79a584f5f0cc2987551bdedbe1808365a487d0f5ea3" exitCode=143
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.193371 4869 scope.go:117] "RemoveContainer" containerID="c12e6d9a2497ce86f69e26eedbbe77a594f0dded08c3af2c1fa95dcc0378cc5e"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.194382 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8gbk\" (UniqueName: \"kubernetes.io/projected/45d8f6aa-887f-444b-81c8-7bf6c03993c9-kube-api-access-t8gbk\") pod \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.194483 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data-custom\") pod \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.194526 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-combined-ca-bundle\") pod \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.194587 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data\") pod \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.194684 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45d8f6aa-887f-444b-81c8-7bf6c03993c9-logs\") pod \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\" (UID: \"45d8f6aa-887f-444b-81c8-7bf6c03993c9\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.196183 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45d8f6aa-887f-444b-81c8-7bf6c03993c9-logs" (OuterVolumeSpecName: "logs") pod "45d8f6aa-887f-444b-81c8-7bf6c03993c9" (UID: "45d8f6aa-887f-444b-81c8-7bf6c03993c9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.196917 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_aefb9658-d09a-4e8d-9769-3d6133bd4b2c/ovsdbserver-sb/0.log"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.197434 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.204529 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45d8f6aa-887f-444b-81c8-7bf6c03993c9-kube-api-access-t8gbk" (OuterVolumeSpecName: "kube-api-access-t8gbk") pod "45d8f6aa-887f-444b-81c8-7bf6c03993c9" (UID: "45d8f6aa-887f-444b-81c8-7bf6c03993c9"). InnerVolumeSpecName "kube-api-access-t8gbk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.207960 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "45d8f6aa-887f-444b-81c8-7bf6c03993c9" (UID: "45d8f6aa-887f-444b-81c8-7bf6c03993c9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.210823 4869 generic.go:334] "Generic (PLEG): container finished" podID="f8f9cd63-d585-4053-b25b-3c0947f43755" containerID="80fd0e1a128b125455b0f3582efabeca4bc9e7c9682db967e54b192f2d8a8aa0" exitCode=0
Jan 30 11:16:10 crc kubenswrapper[4869]: E0130 11:16:10.239948 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92 is running failed: container process not found" containerID="2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 30 11:16:10 crc kubenswrapper[4869]: E0130 11:16:10.240534 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92 is running failed: container process not found" containerID="2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 30 11:16:10 crc kubenswrapper[4869]: E0130 11:16:10.246892 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92 is running failed: container process not found" containerID="2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 30 11:16:10 crc kubenswrapper[4869]: E0130 11:16:10.246964 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0f954356-d9a2-4183-9033-adf859e722e4" containerName="nova-scheduler-scheduler"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.252822 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45d8f6aa-887f-444b-81c8-7bf6c03993c9" (UID: "45d8f6aa-887f-444b-81c8-7bf6c03993c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.260478 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data" (OuterVolumeSpecName: "config-data") pod "45d8f6aa-887f-444b-81c8-7bf6c03993c9" (UID: "45d8f6aa-887f-444b-81c8-7bf6c03993c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.297727 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.297760 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.297771 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d8f6aa-887f-444b-81c8-7bf6c03993c9-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.297910 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45d8f6aa-887f-444b-81c8-7bf6c03993c9-logs\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.297925 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8gbk\" (UniqueName: \"kubernetes.io/projected/45d8f6aa-887f-444b-81c8-7bf6c03993c9-kube-api-access-t8gbk\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.425186 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc6721d-cb72-45ad-ad97-1c045d0bd2cb" path="/var/lib/kubelet/pods/0dc6721d-cb72-45ad-ad97-1c045d0bd2cb/volumes"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.425792 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="285e9203-3b0b-4a52-8464-1019a682fd9d" path="/var/lib/kubelet/pods/285e9203-3b0b-4a52-8464-1019a682fd9d/volumes"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.426481 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42a310a9-b061-4d7a-9644-5f1303fc5c15" path="/var/lib/kubelet/pods/42a310a9-b061-4d7a-9644-5f1303fc5c15/volumes"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.427033 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78dc88a3-3860-4c7f-acaf-5e2568a8761d" path="/var/lib/kubelet/pods/78dc88a3-3860-4c7f-acaf-5e2568a8761d/volumes"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.428063 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9120a9e7-f89e-4d11-8481-d352ffe17419" path="/var/lib/kubelet/pods/9120a9e7-f89e-4d11-8481-d352ffe17419/volumes"
Jan 30 11:16:10 crc kubenswrapper[4869]: E0130 11:16:10.431103 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4618ceff_14a9_4866_aa22_e29767d8d7e4.slice/crio-ab6fefef94e67c7669176c91b03cf31715872e1eba1e24a159531ceb1c264993.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod286d79ce_b123_48b8_b8d1_9a1696fe00bb.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7503066_4e9b_410e_b83e_04ec6c2dc05c.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4618ceff_14a9_4866_aa22_e29767d8d7e4.slice/crio-conmon-ab6fefef94e67c7669176c91b03cf31715872e1eba1e24a159531ceb1c264993.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc446ec70_c06c_4301_987c_423882ca1469.slice/crio-0e92cac8745e164845a2872a8982b9ab3d928d8d308092cb8e265a5afd40fc6c\": RecentStats: unable to find data in memory cache]"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.435854 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7503066-4e9b-410e-b83e-04ec6c2dc05c" path="/var/lib/kubelet/pods/e7503066-4e9b-410e-b83e-04ec6c2dc05c/volumes"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437242 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-644f9f48bf-ccrr2" event={"ID":"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f","Type":"ContainerDied","Data":"30e76731331681ee54fc2b12405950fb45c6d15c16f3d7f16a01c29ca55daa7e"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437282 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-644f9f48bf-ccrr2" event={"ID":"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f","Type":"ContainerDied","Data":"e05f057e1226a4ecd5362991160142f5969097d16d333ab689e4639d3978f4f8"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437309 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437334 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-r98zg"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437352 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-r98zg"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437375 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-jrjbc" event={"ID":"333bf862-5ea9-43df-926f-5d8e463b2c80","Type":"ContainerDied","Data":"6fd34c93cb0396b2d042d97c363690853e0249d5664e9373de8228ced96fe825"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437391 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-9fb998c86-5qb5j" event={"ID":"74632136-6311-4daa-80c7-4c32c20d6a4a","Type":"ContainerDied","Data":"5c3a077c91d9559f388be79a584f5f0cc2987551bdedbe1808365a487d0f5ea3"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aefb9658-d09a-4e8d-9769-3d6133bd4b2c","Type":"ContainerDied","Data":"3bf97457298a17a8e32c7781ad918e1cf1b20c3fbd5c9533c79c400ac324542f"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.437442 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8f9cd63-d585-4053-b25b-3c0947f43755","Type":"ContainerDied","Data":"80fd0e1a128b125455b0f3582efabeca4bc9e7c9682db967e54b192f2d8a8aa0"}
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.543839 4869 scope.go:117] "RemoveContainer" containerID="ac0db009767c9d20a2e2fbda72d982d36dc79b4ff0b0a3d504db1abca9f191e3"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.573429 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-65d8584cdc-bgnk8"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.574340 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.580940 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-jrjbc"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.590338 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-jrjbc"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.596451 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.597811 4869 scope.go:117] "RemoveContainer" containerID="96a5b1a816c46859aaf489cb9698ab2c6768310099b7bff27f10d01862c2418e"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.603044 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.714681 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5qws\" (UniqueName: \"kubernetes.io/projected/f8f9cd63-d585-4053-b25b-3c0947f43755-kube-api-access-f5qws\") pod \"f8f9cd63-d585-4053-b25b-3c0947f43755\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.714787 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-combined-ca-bundle\") pod \"104ca851-1c21-41bd-8a92-423fdab83753\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.714813 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gwkm\" (UniqueName: \"kubernetes.io/projected/104ca851-1c21-41bd-8a92-423fdab83753-kube-api-access-2gwkm\") pod \"104ca851-1c21-41bd-8a92-423fdab83753\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.714876 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-combined-ca-bundle\") pod \"f8f9cd63-d585-4053-b25b-3c0947f43755\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.714920 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data-custom\") pod \"104ca851-1c21-41bd-8a92-423fdab83753\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.714944 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-nova-novncproxy-tls-certs\") pod \"f8f9cd63-d585-4053-b25b-3c0947f43755\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.714971 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-config-data\") pod \"f8f9cd63-d585-4053-b25b-3c0947f43755\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.715000 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data\") pod \"104ca851-1c21-41bd-8a92-423fdab83753\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.715027 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/104ca851-1c21-41bd-8a92-423fdab83753-logs\") pod \"104ca851-1c21-41bd-8a92-423fdab83753\" (UID: \"104ca851-1c21-41bd-8a92-423fdab83753\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.715088 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-vencrypt-tls-certs\") pod \"f8f9cd63-d585-4053-b25b-3c0947f43755\" (UID: \"f8f9cd63-d585-4053-b25b-3c0947f43755\") "
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.716202 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104ca851-1c21-41bd-8a92-423fdab83753-logs" (OuterVolumeSpecName: "logs") pod "104ca851-1c21-41bd-8a92-423fdab83753" (UID: "104ca851-1c21-41bd-8a92-423fdab83753"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.745214 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104ca851-1c21-41bd-8a92-423fdab83753-kube-api-access-2gwkm" (OuterVolumeSpecName: "kube-api-access-2gwkm") pod "104ca851-1c21-41bd-8a92-423fdab83753" (UID: "104ca851-1c21-41bd-8a92-423fdab83753"). InnerVolumeSpecName "kube-api-access-2gwkm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.745519 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8f9cd63-d585-4053-b25b-3c0947f43755-kube-api-access-f5qws" (OuterVolumeSpecName: "kube-api-access-f5qws") pod "f8f9cd63-d585-4053-b25b-3c0947f43755" (UID: "f8f9cd63-d585-4053-b25b-3c0947f43755"). InnerVolumeSpecName "kube-api-access-f5qws". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.757276 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-config-data" (OuterVolumeSpecName: "config-data") pod "f8f9cd63-d585-4053-b25b-3c0947f43755" (UID: "f8f9cd63-d585-4053-b25b-3c0947f43755"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.757312 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "104ca851-1c21-41bd-8a92-423fdab83753" (UID: "104ca851-1c21-41bd-8a92-423fdab83753"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.804157 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "104ca851-1c21-41bd-8a92-423fdab83753" (UID: "104ca851-1c21-41bd-8a92-423fdab83753"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.818178 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/104ca851-1c21-41bd-8a92-423fdab83753-logs\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.818544 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5qws\" (UniqueName: \"kubernetes.io/projected/f8f9cd63-d585-4053-b25b-3c0947f43755-kube-api-access-f5qws\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.818612 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.818725 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gwkm\" (UniqueName: \"kubernetes.io/projected/104ca851-1c21-41bd-8a92-423fdab83753-kube-api-access-2gwkm\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.818805 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.818866 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.840091 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8f9cd63-d585-4053-b25b-3c0947f43755" (UID: "f8f9cd63-d585-4053-b25b-3c0947f43755"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.883363 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data" (OuterVolumeSpecName: "config-data") pod "104ca851-1c21-41bd-8a92-423fdab83753" (UID: "104ca851-1c21-41bd-8a92-423fdab83753"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.889206 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "f8f9cd63-d585-4053-b25b-3c0947f43755" (UID: "f8f9cd63-d585-4053-b25b-3c0947f43755"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.891349 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.165:8776/healthcheck\": read tcp 10.217.0.2:46092->10.217.0.165:8776: read: connection reset by peer"
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.896962 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "f8f9cd63-d585-4053-b25b-3c0947f43755" (UID: "f8f9cd63-d585-4053-b25b-3c0947f43755"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.923037 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.923081 4869 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.923095 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104ca851-1c21-41bd-8a92-423fdab83753-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:10 crc kubenswrapper[4869]: I0130 11:16:10.923105 4869 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f9cd63-d585-4053-b25b-3c0947f43755-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.026110 4869 secret.go:188] Couldn't get secret openstack/glance-scripts: secret "glance-scripts" not found
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.026191 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:15.02617223 +0000 UTC m=+1325.576048306 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-scripts" not found
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.026258 4869 secret.go:188] Couldn't get secret openstack/glance-default-internal-config-data: secret "glance-default-internal-config-data" not found
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.026329 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data podName:ec34c29c-665f-465a-99d0-c342aca2cf14 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:15.026308114 +0000 UTC m=+1325.576184260 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data") pod "glance-default-internal-api-0" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14") : secret "glance-default-internal-config-data" not found
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.141254 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b is running failed: container process not found" containerID="8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.141757 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b is running failed: container process not found" containerID="8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.142212 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b is running failed: container process not found" containerID="8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 30 11:16:11 crc kubenswrapper[4869]: E0130 11:16:11.142245 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="ba3b2f08-608c-49db-b58c-f20480a51bba" containerName="nova-cell0-conductor-conductor"
Jan 30 11:16:11 crc kubenswrapper[4869]: I0130 11:16:11.169741 4869 scope.go:117] "RemoveContainer" containerID="8dd5852d63fc179fcf40f8c40a3a27e1e4ca18f73ee8966299e8329c0ac8d776"
Jan 30 11:16:11 crc kubenswrapper[4869]: I0130 11:16:11.188109 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-70de-account-create-update-9sbz8"
Jan 30 11:16:11 crc kubenswrapper[4869]: I0130 11:16:11.202056 4869 scope.go:117] "RemoveContainer" containerID="9f060107943b0642dfd7e507c493ff833b9b292bb9f38467328dd22ddf77c864"
Jan 30 11:16:11 crc kubenswrapper[4869]: I0130 11:16:11.204095 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 30 11:16:11 crc kubenswrapper[4869]: I0130 11:16:11.229327 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48318921-34ba-442b-b9f0-6f7057d5cdf5-operator-scripts\") pod \"48318921-34ba-442b-b9f0-6f7057d5cdf5\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") "
Jan 30 11:16:11 crc kubenswrapper[4869]: I0130 11:16:11.229457 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lbr7\" (UniqueName: \"kubernetes.io/projected/48318921-34ba-442b-b9f0-6f7057d5cdf5-kube-api-access-4lbr7\") pod \"48318921-34ba-442b-b9f0-6f7057d5cdf5\" (UID: \"48318921-34ba-442b-b9f0-6f7057d5cdf5\") "
Jan 30 11:16:11 crc kubenswrapper[4869]: I0130 11:16:11.230145 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48318921-34ba-442b-b9f0-6f7057d5cdf5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "48318921-34ba-442b-b9f0-6f7057d5cdf5" (UID: "48318921-34ba-442b-b9f0-6f7057d5cdf5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.240647 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.240970 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-central-agent" containerID="cri-o://c37c781ec6bbb85eee14167c524d88c4bc0c5851de4283fda9feb6b69d175421" gracePeriod=30
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.241118 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="proxy-httpd" containerID="cri-o://d6f67b2e1962982646c99be9310fe46368582436c0f28ba1d79bd9af395475fc" gracePeriod=30
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.241157 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="sg-core" containerID="cri-o://27b5ea189bd4822ad549497c90e0af47c89f4bcaf3a147f407784ea7d6d2c6c9" gracePeriod=30
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.241190 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-notification-agent" containerID="cri-o://2df8b1eba74a41c2b063c8fbcf197a06c9966c6a5ba096d492ead9e41ab7aa2e" gracePeriod=30
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.242142 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48318921-34ba-442b-b9f0-6f7057d5cdf5-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.255139 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.255386 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d098b42f-f300-4308-93b0-fe2af785ce4c" containerName="kube-state-metrics" containerID="cri-o://b9690da5434b6d4146f8eab01da1057397213e30a8e072731befa7042dbba543" gracePeriod=30
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.254857 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48318921-34ba-442b-b9f0-6f7057d5cdf5-kube-api-access-4lbr7" (OuterVolumeSpecName: "kube-api-access-4lbr7") pod "48318921-34ba-442b-b9f0-6f7057d5cdf5" (UID: "48318921-34ba-442b-b9f0-6f7057d5cdf5"). InnerVolumeSpecName "kube-api-access-4lbr7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.267600 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-644f9f48bf-ccrr2"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.324984 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-ch52c"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.342352 4869 scope.go:117] "RemoveContainer" containerID="f645ebc573f7dd5869dcdf3ccfab2bce9e8305d65b43a7373e7b2cef92aec27f"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345076 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7037-account-create-update-28qzk"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.344655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-combined-ca-bundle\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345304 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-operator-scripts\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345373 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-run-httpd\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345425 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-generated\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345452 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-galera-tls-certs\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345548 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlcdh\" (UniqueName: \"kubernetes.io/projected/4618ceff-14a9-4866-aa22-e29767d8d7e4-kube-api-access-tlcdh\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345618 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-default\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345689 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-kolla-config\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345767 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-public-tls-certs\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345832 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-config-data\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345898 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-internal-tls-certs\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.345926 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-etc-swift\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.346010 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqjkw\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-kube-api-access-nqjkw\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.346115 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.346204 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-log-httpd\") pod \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\" (UID: \"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.346271 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-combined-ca-bundle\") pod \"4618ceff-14a9-4866-aa22-e29767d8d7e4\" (UID: \"4618ceff-14a9-4866-aa22-e29767d8d7e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.347441 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.347493 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lbr7\" (UniqueName: \"kubernetes.io/projected/48318921-34ba-442b-b9f0-6f7057d5cdf5-kube-api-access-4lbr7\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.347600 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.347686 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data podName:15b1a123-3831-4fa6-bc52-3f0cf30953f9 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:15.347664851 +0000 UTC m=+1325.897541017 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9") : configmap "rabbitmq-cell1-config-data" not found
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.347879 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.348427 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.349741 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.354906 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.358941 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.359122 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.360521 4869 generic.go:334] "Generic (PLEG): container finished" podID="f5b9f902-0038-4057-b1c2-66222926c1b5" containerID="bf97f8988324fe855d157ff3d7e5e4eff62fa9c240599bdf67db6d8726b0793a" exitCode=1
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.360586 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zjpp9" event={"ID":"f5b9f902-0038-4057-b1c2-66222926c1b5","Type":"ContainerDied","Data":"bf97f8988324fe855d157ff3d7e5e4eff62fa9c240599bdf67db6d8726b0793a"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.361558 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.362343 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4618ceff-14a9-4866-aa22-e29767d8d7e4-kube-api-access-tlcdh" (OuterVolumeSpecName: "kube-api-access-tlcdh") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "kube-api-access-tlcdh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.374353 4869 scope.go:117] "RemoveContainer" containerID="bf97f8988324fe855d157ff3d7e5e4eff62fa9c240599bdf67db6d8726b0793a"
Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.375044 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-zjpp9_openstack(f5b9f902-0038-4057-b1c2-66222926c1b5)\"" pod="openstack/root-account-create-update-zjpp9" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.392982 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-kube-api-access-nqjkw" (OuterVolumeSpecName: "kube-api-access-nqjkw") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "kube-api-access-nqjkw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.436512 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4618ceff-14a9-4866-aa22-e29767d8d7e4","Type":"ContainerDied","Data":"7e97ede44f9dcded717a0d3ce3c86515c75392d1854bab5bbd0e148cbdfd9b57"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.436606 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.455474 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456031 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9320a43-d34c-4f43-b304-8f6414a44b33-operator-scripts\") pod \"b9320a43-d34c-4f43-b304-8f6414a44b33\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456075 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbx67\" (UniqueName: \"kubernetes.io/projected/ba3b2f08-608c-49db-b58c-f20480a51bba-kube-api-access-tbx67\") pod \"ba3b2f08-608c-49db-b58c-f20480a51bba\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456097 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-combined-ca-bundle\") pod \"ba3b2f08-608c-49db-b58c-f20480a51bba\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456169 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a07399e-d252-46b3-823d-3fabceb4b671-operator-scripts\") pod \"5a07399e-d252-46b3-823d-3fabceb4b671\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456189 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hkds\" (UniqueName: \"kubernetes.io/projected/5a07399e-d252-46b3-823d-3fabceb4b671-kube-api-access-2hkds\") pod \"5a07399e-d252-46b3-823d-3fabceb4b671\" (UID: \"5a07399e-d252-46b3-823d-3fabceb4b671\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456244 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dmdx\" (UniqueName: \"kubernetes.io/projected/b9320a43-d34c-4f43-b304-8f6414a44b33-kube-api-access-7dmdx\") pod \"b9320a43-d34c-4f43-b304-8f6414a44b33\" (UID: \"b9320a43-d34c-4f43-b304-8f6414a44b33\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456327 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-config-data\") pod \"ba3b2f08-608c-49db-b58c-f20480a51bba\" (UID: \"ba3b2f08-608c-49db-b58c-f20480a51bba\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456782 4869 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-kolla-config\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456792 4869 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456801 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqjkw\" (UniqueName: \"kubernetes.io/projected/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-kube-api-access-nqjkw\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456812 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456821 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456828 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456837 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-generated\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456846 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlcdh\" (UniqueName: \"kubernetes.io/projected/4618ceff-14a9-4866-aa22-e29767d8d7e4-kube-api-access-tlcdh\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.456854 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4618ceff-14a9-4866-aa22-e29767d8d7e4-config-data-default\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.457809 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a07399e-d252-46b3-823d-3fabceb4b671-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5a07399e-d252-46b3-823d-3fabceb4b671" (UID: "5a07399e-d252-46b3-823d-3fabceb4b671"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.458580 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9320a43-d34c-4f43-b304-8f6414a44b33-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b9320a43-d34c-4f43-b304-8f6414a44b33" (UID: "b9320a43-d34c-4f43-b304-8f6414a44b33"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.466434 4869 generic.go:334] "Generic (PLEG): container finished" podID="0f954356-d9a2-4183-9033-adf859e722e4" containerID="2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" exitCode=0
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.466507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0f954356-d9a2-4183-9033-adf859e722e4","Type":"ContainerDied","Data":"2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.466538 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0f954356-d9a2-4183-9033-adf859e722e4","Type":"ContainerDied","Data":"0529f67053215369e9d9b7ea07ab951ef49f1f1667e6677d142fb5ca4a0066c1"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.493006 4869 generic.go:334] "Generic (PLEG): container finished" podID="323f79a2-48c7-4768-8707-23bc31755a50" containerID="d02ef15f5ba93cc3d7b5586f76cca1e5e8d3253af837813c4d8c7db13197b4d6" exitCode=0
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.493119 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-587f84cd84-zqhxn" event={"ID":"323f79a2-48c7-4768-8707-23bc31755a50","Type":"ContainerDied","Data":"d02ef15f5ba93cc3d7b5586f76cca1e5e8d3253af837813c4d8c7db13197b4d6"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.504654 4869 generic.go:334] "Generic (PLEG): container finished" podID="ba3b2f08-608c-49db-b58c-f20480a51bba" containerID="8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" exitCode=0
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.504806 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ba3b2f08-608c-49db-b58c-f20480a51bba","Type":"ContainerDied","Data":"8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.504837 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ba3b2f08-608c-49db-b58c-f20480a51bba","Type":"ContainerDied","Data":"e06db928b0be9812af36d023e4a2bb0bb10824f7c51148f1fd9d9b6abee6e978"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.504910 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.527406 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba3b2f08-608c-49db-b58c-f20480a51bba-kube-api-access-tbx67" (OuterVolumeSpecName: "kube-api-access-tbx67") pod "ba3b2f08-608c-49db-b58c-f20480a51bba" (UID: "ba3b2f08-608c-49db-b58c-f20480a51bba"). InnerVolumeSpecName "kube-api-access-tbx67". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.552017 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-cd90-account-create-update-ltwwr"]
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.559655 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a07399e-d252-46b3-823d-3fabceb4b671-kube-api-access-2hkds" (OuterVolumeSpecName: "kube-api-access-2hkds") pod "5a07399e-d252-46b3-823d-3fabceb4b671" (UID: "5a07399e-d252-46b3-823d-3fabceb4b671"). InnerVolumeSpecName "kube-api-access-2hkds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.576324 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-combined-ca-bundle\") pod \"0f954356-d9a2-4183-9033-adf859e722e4\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.576382 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrprx\" (UniqueName: \"kubernetes.io/projected/0f954356-d9a2-4183-9033-adf859e722e4-kube-api-access-xrprx\") pod \"0f954356-d9a2-4183-9033-adf859e722e4\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.576583 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-config-data\") pod \"0f954356-d9a2-4183-9033-adf859e722e4\" (UID: \"0f954356-d9a2-4183-9033-adf859e722e4\") "
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.577074 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9320a43-d34c-4f43-b304-8f6414a44b33-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.577085 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbx67\" (UniqueName: \"kubernetes.io/projected/ba3b2f08-608c-49db-b58c-f20480a51bba-kube-api-access-tbx67\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.577096 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a07399e-d252-46b3-823d-3fabceb4b671-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.577104 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hkds\" (UniqueName: \"kubernetes.io/projected/5a07399e-d252-46b3-823d-3fabceb4b671-kube-api-access-2hkds\") on node \"crc\" DevicePath \"\""
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.580358 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-587f84cd84-zqhxn"
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.583976 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-cd90-account-create-update-ltwwr"]
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.584047 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65d8584cdc-bgnk8" event={"ID":"104ca851-1c21-41bd-8a92-423fdab83753","Type":"ContainerDied","Data":"370b9adeeba2894047536a2b4dc7374948fe222b74d0629f4f697dc3e699ff8f"}
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.584204 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-65d8584cdc-bgnk8" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.593423 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" event={"ID":"45d8f6aa-887f-444b-81c8-7bf6c03993c9","Type":"ContainerDied","Data":"7eff2f1e87b8492e8940c53d5afc8748f86124ec76ec391c6784cc81c39859a8"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.593552 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-66466f9898-mzt77" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.598793 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-86a6-account-create-update-ch52c" event={"ID":"b9320a43-d34c-4f43-b304-8f6414a44b33","Type":"ContainerDied","Data":"b1378dcb5d8617ca2d069b38cc24a3920e54b6791f959f0a7314ff01c2aed810"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.598869 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-86a6-account-create-update-ch52c" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.601208 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-70de-account-create-update-9sbz8" event={"ID":"48318921-34ba-442b-b9f0-6f7057d5cdf5","Type":"ContainerDied","Data":"45a82d988463a6192c4a79e192c1a6d2dd6d594b4393d574c865a9b1c86858c8"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.601289 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-70de-account-create-update-9sbz8" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.604060 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9320a43-d34c-4f43-b304-8f6414a44b33-kube-api-access-7dmdx" (OuterVolumeSpecName: "kube-api-access-7dmdx") pod "b9320a43-d34c-4f43-b304-8f6414a44b33" (UID: "b9320a43-d34c-4f43-b304-8f6414a44b33"). InnerVolumeSpecName "kube-api-access-7dmdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.614601 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f954356-d9a2-4183-9033-adf859e722e4-kube-api-access-xrprx" (OuterVolumeSpecName: "kube-api-access-xrprx") pod "0f954356-d9a2-4183-9033-adf859e722e4" (UID: "0f954356-d9a2-4183-9033-adf859e722e4"). InnerVolumeSpecName "kube-api-access-xrprx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.622064 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "mysql-db") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.627857 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.628086 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" containerName="memcached" containerID="cri-o://6e3f92f98f69d9675547c36cfbba2bee734bf6e6196d8d62fa53a9ae6f597e60" gracePeriod=30 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.631326 4869 scope.go:117] "RemoveContainer" containerID="04cb5cbbd47e7e666f5e169d92064f3b480d2b19c91ebb32ff908f9eaa6eacd0" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.639754 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cd90-account-create-update-7lmjl"] Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640264 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640277 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640290 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640296 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640308 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-httpd" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640316 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-httpd" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640329 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c446ec70-c06c-4301-987c-423882ca1469" containerName="dnsmasq-dns" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640337 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c446ec70-c06c-4301-987c-423882ca1469" containerName="dnsmasq-dns" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640352 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba3b2f08-608c-49db-b58c-f20480a51bba" containerName="nova-cell0-conductor-conductor" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640359 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba3b2f08-608c-49db-b58c-f20480a51bba" containerName="nova-cell0-conductor-conductor" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640368 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640374 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640385 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f954356-d9a2-4183-9033-adf859e722e4" containerName="nova-scheduler-scheduler" 
Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640390 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f954356-d9a2-4183-9033-adf859e722e4" containerName="nova-scheduler-scheduler" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640399 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-log" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640404 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-log" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640416 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c446ec70-c06c-4301-987c-423882ca1469" containerName="init" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640421 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c446ec70-c06c-4301-987c-423882ca1469" containerName="init" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640434 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker-log" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640440 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker-log" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640454 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-server" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640460 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-server" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640478 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener-log" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640483 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener-log" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640492 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="333bf862-5ea9-43df-926f-5d8e463b2c80" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640497 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="333bf862-5ea9-43df-926f-5d8e463b2c80" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640507 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="ovsdbserver-sb" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640514 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="ovsdbserver-sb" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640524 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="ovsdbserver-nb" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640529 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="ovsdbserver-nb" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640538 4869 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerName="galera" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640543 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerName="galera" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640554 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-api" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640560 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-api" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640569 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f9cd63-d585-4053-b25b-3c0947f43755" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640576 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f9cd63-d585-4053-b25b-3c0947f43755" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640586 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640592 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.640602 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerName="mysql-bootstrap" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640608 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerName="mysql-bootstrap" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640876 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba3b2f08-608c-49db-b58c-f20480a51bba" containerName="nova-cell0-conductor-conductor" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640902 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640912 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f9cd63-d585-4053-b25b-3c0947f43755" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640923 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="ovsdbserver-sb" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640933 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f954356-d9a2-4183-9033-adf859e722e4" containerName="nova-scheduler-scheduler" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640941 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-log" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640953 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" containerName="galera" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640950 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640964 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640972 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-server" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640981 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="323f79a2-48c7-4768-8707-23bc31755a50" containerName="placement-api" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640988 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="104ca851-1c21-41bd-8a92-423fdab83753" containerName="barbican-worker-log" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.640996 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641004 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641011 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" containerName="ovsdbserver-nb" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641018 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" containerName="barbican-keystone-listener-log" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641024 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c446ec70-c06c-4301-987c-423882ca1469" containerName="dnsmasq-dns" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641032 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="333bf862-5ea9-43df-926f-5d8e463b2c80" containerName="openstack-network-exporter" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641038 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" containerName="proxy-httpd" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641692 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8f9cd63-d585-4053-b25b-3c0947f43755","Type":"ContainerDied","Data":"43a7a6544d47d5d739aa8f18cf2928b603890be3121d118520247f97a26db8fe"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.641931 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.652487 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.666371 4869 generic.go:334] "Generic (PLEG): container finished" podID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerID="2d8084a7cade6c321549a9768cbcd158ea761ca605ab57b0333c081b4ad26652" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.666491 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65d95395-5aea-4546-b12a-ec8ce58ec704","Type":"ContainerDied","Data":"2d8084a7cade6c321549a9768cbcd158ea761ca605ab57b0333c081b4ad26652"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.680467 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-combined-ca-bundle\") pod \"323f79a2-48c7-4768-8707-23bc31755a50\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.680659 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-config-data\") pod \"323f79a2-48c7-4768-8707-23bc31755a50\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.680759 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-px658\" (UniqueName: \"kubernetes.io/projected/323f79a2-48c7-4768-8707-23bc31755a50-kube-api-access-px658\") pod \"323f79a2-48c7-4768-8707-23bc31755a50\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.680788 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-public-tls-certs\") pod \"323f79a2-48c7-4768-8707-23bc31755a50\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.680816 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/323f79a2-48c7-4768-8707-23bc31755a50-logs\") pod \"323f79a2-48c7-4768-8707-23bc31755a50\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.680847 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-internal-tls-certs\") pod \"323f79a2-48c7-4768-8707-23bc31755a50\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.680868 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-scripts\") pod \"323f79a2-48c7-4768-8707-23bc31755a50\" (UID: \"323f79a2-48c7-4768-8707-23bc31755a50\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.681238 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 
11:16:11.681251 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrprx\" (UniqueName: \"kubernetes.io/projected/0f954356-d9a2-4183-9033-adf859e722e4-kube-api-access-xrprx\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.681262 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dmdx\" (UniqueName: \"kubernetes.io/projected/b9320a43-d34c-4f43-b304-8f6414a44b33-kube-api-access-7dmdx\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.686090 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/323f79a2-48c7-4768-8707-23bc31755a50-logs" (OuterVolumeSpecName: "logs") pod "323f79a2-48c7-4768-8707-23bc31755a50" (UID: "323f79a2-48c7-4768-8707-23bc31755a50"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.688102 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cd90-account-create-update-7lmjl"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.734001 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-97bpn"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.734399 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-df6cb98f-8s46w" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.166:9696/\": dial tcp 10.217.0.166:9696: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.746452 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-644f9f48bf-ccrr2" event={"ID":"e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f","Type":"ContainerDied","Data":"7694188781fbf71f3d2d8ff8e3eaf626d758903a5959565bb1ec077f16e0ceae"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.746598 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-644f9f48bf-ccrr2" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.759861 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-97bpn"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.765478 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7037-account-create-update-28qzk" event={"ID":"5a07399e-d252-46b3-823d-3fabceb4b671","Type":"ContainerDied","Data":"0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.766076 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7037-account-create-update-28qzk" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.796684 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/323f79a2-48c7-4768-8707-23bc31755a50-kube-api-access-px658" (OuterVolumeSpecName: "kube-api-access-px658") pod "323f79a2-48c7-4768-8707-23bc31755a50" (UID: "323f79a2-48c7-4768-8707-23bc31755a50"). InnerVolumeSpecName "kube-api-access-px658". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.796872 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-scripts" (OuterVolumeSpecName: "scripts") pod "323f79a2-48c7-4768-8707-23bc31755a50" (UID: "323f79a2-48c7-4768-8707-23bc31755a50"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.797534 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9txzz\" (UniqueName: \"kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.797756 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.807867 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.808868 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-px658\" (UniqueName: \"kubernetes.io/projected/323f79a2-48c7-4768-8707-23bc31755a50-kube-api-access-px658\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.808883 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/323f79a2-48c7-4768-8707-23bc31755a50-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.829380 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-config-data" (OuterVolumeSpecName: "config-data") pod "0f954356-d9a2-4183-9033-adf859e722e4" (UID: "0f954356-d9a2-4183-9033-adf859e722e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.830602 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.831944 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.849317 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-k8rcl"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.870819 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.871953 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-k8rcl"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.877208 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-config-data" (OuterVolumeSpecName: "config-data") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.878326 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" (UID: "e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.886562 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6879fcbdc7-xgzr6"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.886872 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-6879fcbdc7-xgzr6" podUID="dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" containerName="keystone-api" containerID="cri-o://8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69" gracePeriod=30 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.904998 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-config-data" (OuterVolumeSpecName: "config-data") pod "ba3b2f08-608c-49db-b58c-f20480a51bba" (UID: "ba3b2f08-608c-49db-b58c-f20480a51bba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911078 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txzz\" (UniqueName: \"kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911166 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911247 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911262 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911273 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911281 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911290 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911299 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.911307 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.911379 4869 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.911434 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts podName:5339c4ee-a589-4517-bdbc-98f4f5dbb356 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:12.411416074 +0000 UTC m=+1322.961292140 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts") pod "keystone-cd90-account-create-update-7lmjl" (UID: "5339c4ee-a589-4517-bdbc-98f4f5dbb356") : configmap "openstack-scripts" not found Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.934246 4869 projected.go:194] Error preparing data for projected volume kube-api-access-9txzz for pod openstack/keystone-cd90-account-create-update-7lmjl: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:11.934334 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz podName:5339c4ee-a589-4517-bdbc-98f4f5dbb356 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:12.434311424 +0000 UTC m=+1322.984187490 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-9txzz" (UniqueName: "kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz") pod "keystone-cd90-account-create-update-7lmjl" (UID: "5339c4ee-a589-4517-bdbc-98f4f5dbb356") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.939947 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f954356-d9a2-4183-9033-adf859e722e4" (UID: "0f954356-d9a2-4183-9033-adf859e722e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.943387 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.960423 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-cqxhv"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.967296 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-cqxhv"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.969584 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "4618ceff-14a9-4866-aa22-e29767d8d7e4" (UID: "4618ceff-14a9-4866-aa22-e29767d8d7e4"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.973506 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba3b2f08-608c-49db-b58c-f20480a51bba" (UID: "ba3b2f08-608c-49db-b58c-f20480a51bba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.982247 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.993866 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-9fb998c86-5qb5j" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": dial tcp 10.217.0.164:9311: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:11.994419 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-9fb998c86-5qb5j" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": dial tcp 10.217.0.164:9311: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.002385 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-cd90-account-create-update-7lmjl"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.014413 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f954356-d9a2-4183-9033-adf859e722e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.014443 4869 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4618ceff-14a9-4866-aa22-e29767d8d7e4-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.014452 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3b2f08-608c-49db-b58c-f20480a51bba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.014460 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.019952 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-zjpp9"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.064932 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "323f79a2-48c7-4768-8707-23bc31755a50" (UID: "323f79a2-48c7-4768-8707-23bc31755a50"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.090667 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-config-data" (OuterVolumeSpecName: "config-data") pod "323f79a2-48c7-4768-8707-23bc31755a50" (UID: "323f79a2-48c7-4768-8707-23bc31755a50"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.117924 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.117952 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.120301 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "323f79a2-48c7-4768-8707-23bc31755a50" (UID: "323f79a2-48c7-4768-8707-23bc31755a50"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.121214 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "323f79a2-48c7-4768-8707-23bc31755a50" (UID: "323f79a2-48c7-4768-8707-23bc31755a50"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.146445 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b6486d6-add2-4abe-8ccb-35517810f949" path="/var/lib/kubelet/pods/1b6486d6-add2-4abe-8ccb-35517810f949/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.147418 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="286d79ce-b123-48b8-b8d1-9a1696fe00bb" path="/var/lib/kubelet/pods/286d79ce-b123-48b8-b8d1-9a1696fe00bb/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.148462 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="333bf862-5ea9-43df-926f-5d8e463b2c80" path="/var/lib/kubelet/pods/333bf862-5ea9-43df-926f-5d8e463b2c80/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.149377 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aefb9658-d09a-4e8d-9769-3d6133bd4b2c" path="/var/lib/kubelet/pods/aefb9658-d09a-4e8d-9769-3d6133bd4b2c/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.150614 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c446ec70-c06c-4301-987c-423882ca1469" path="/var/lib/kubelet/pods/c446ec70-c06c-4301-987c-423882ca1469/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.151375 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4048f70-ac71-4e16-b86c-ea67021c6c58" path="/var/lib/kubelet/pods/e4048f70-ac71-4e16-b86c-ea67021c6c58/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.152179 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d" path="/var/lib/kubelet/pods/e7ed99a1-5866-47cc-9c12-8bb4d3ea9a2d/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.153185 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9113947-7343-454e-a806-50db72e74a54" path="/var/lib/kubelet/pods/f9113947-7343-454e-a806-50db72e74a54/volumes" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.173161 4869 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="34532f6a-b213-422d-8126-d74d95c32497" containerName="galera" containerID="cri-o://b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" gracePeriod=30 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.226059 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.226093 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/323f79a2-48c7-4768-8707-23bc31755a50-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.248229 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.172:9292/healthcheck\": dial tcp 10.217.0.172:9292: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.248277 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.172:9292/healthcheck\": dial tcp 10.217.0.172:9292: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.418151 4869 scope.go:117] "RemoveContainer" containerID="ab6fefef94e67c7669176c91b03cf31715872e1eba1e24a159531ceb1c264993" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.422519 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.434196 4869 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a07399e_d252_46b3_823d_3fabceb4b671.slice/crio-0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63: Error finding container 0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63: Status 404 returned error can't find the container with id 0b3c2aa53e448a218994b086914a1894920c8af537076a775688de8b5628ce63 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.435233 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txzz\" (UniqueName: \"kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.435589 4869 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.436380 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.436416 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts podName:5339c4ee-a589-4517-bdbc-98f4f5dbb356 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:13.436392735 +0000 UTC m=+1323.986268801 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts") pod "keystone-cd90-account-create-update-7lmjl" (UID: "5339c4ee-a589-4517-bdbc-98f4f5dbb356") : configmap "openstack-scripts" not found Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.440611 4869 projected.go:194] Error preparing data for projected volume kube-api-access-9txzz for pod openstack/keystone-cd90-account-create-update-7lmjl: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.440887 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz podName:5339c4ee-a589-4517-bdbc-98f4f5dbb356 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:13.440752589 +0000 UTC m=+1323.990628655 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-9txzz" (UniqueName: "kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz") pod "keystone-cd90-account-create-update-7lmjl" (UID: "5339c4ee-a589-4517-bdbc-98f4f5dbb356") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.461080 4869 scope.go:117] "RemoveContainer" containerID="022cd75b7d7edfb330306e68dd74e8e4b7a53321b13225e5a124ef1093f6c767" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.497505 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-9txzz operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-cd90-account-create-update-7lmjl" podUID="5339c4ee-a589-4517-bdbc-98f4f5dbb356" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.523967 4869 scope.go:117] "RemoveContainer" containerID="2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.533085 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": dial tcp 10.217.0.206:8775: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.533222 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": dial tcp 10.217.0.206:8775: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.534361 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-66466f9898-mzt77"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538272 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data-custom\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538316 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-public-tls-certs\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538364 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjqk4\" (UniqueName: \"kubernetes.io/projected/65d95395-5aea-4546-b12a-ec8ce58ec704-kube-api-access-qjqk4\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538481 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65d95395-5aea-4546-b12a-ec8ce58ec704-logs\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538505 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-scripts\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538519 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-combined-ca-bundle\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538539 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-internal-tls-certs\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538564 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.538586 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65d95395-5aea-4546-b12a-ec8ce58ec704-etc-machine-id\") pod \"65d95395-5aea-4546-b12a-ec8ce58ec704\" (UID: \"65d95395-5aea-4546-b12a-ec8ce58ec704\") " Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.539138 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d95395-5aea-4546-b12a-ec8ce58ec704-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.545180 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.547502 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65d95395-5aea-4546-b12a-ec8ce58ec704-logs" (OuterVolumeSpecName: "logs") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.554201 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-66466f9898-mzt77"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.594447 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65d95395-5aea-4546-b12a-ec8ce58ec704-kube-api-access-qjqk4" (OuterVolumeSpecName: "kube-api-access-qjqk4") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "kube-api-access-qjqk4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.605919 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-7037-account-create-update-28qzk"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.609535 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-7037-account-create-update-28qzk"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.621870 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-scripts" (OuterVolumeSpecName: "scripts") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.622884 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.637186 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.637243 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-65d8584cdc-bgnk8"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.641223 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65d95395-5aea-4546-b12a-ec8ce58ec704-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.641255 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.641310 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65d95395-5aea-4546-b12a-ec8ce58ec704-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.641324 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.641333 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjqk4\" (UniqueName: \"kubernetes.io/projected/65d95395-5aea-4546-b12a-ec8ce58ec704-kube-api-access-qjqk4\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.641396 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-65d8584cdc-bgnk8"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.663380 4869 scope.go:117] "RemoveContainer" containerID="2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.666263 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92\": container with ID starting with 2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92 not found: ID does not exist" containerID="2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.666304 4869 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92"} err="failed to get container status \"2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92\": rpc error: code = NotFound desc = could not find container \"2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92\": container with ID starting with 2252e68fa2bb62b190cb42ee1412af4d52c8c7aad25244c365965ac8e5919d92 not found: ID does not exist" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.666328 4869 scope.go:117] "RemoveContainer" containerID="8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.689304 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-70de-account-create-update-9sbz8"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.699424 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.710675 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-70de-account-create-update-9sbz8"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.722316 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.746911 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.751210 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data" (OuterVolumeSpecName: "config-data") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.755160 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.755193 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.755205 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.755932 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "65d95395-5aea-4546-b12a-ec8ce58ec704" (UID: "65d95395-5aea-4546-b12a-ec8ce58ec704"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.761595 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.771895 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.780816 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.785685 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.788950 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.792623 4869 generic.go:334] "Generic (PLEG): container finished" podID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerID="d6f67b2e1962982646c99be9310fe46368582436c0f28ba1d79bd9af395475fc" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.792648 4869 generic.go:334] "Generic (PLEG): container finished" podID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerID="27b5ea189bd4822ad549497c90e0af47c89f4bcaf3a147f407784ea7d6d2c6c9" exitCode=2 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.792656 4869 generic.go:334] "Generic (PLEG): container finished" podID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerID="2df8b1eba74a41c2b063c8fbcf197a06c9966c6a5ba096d492ead9e41ab7aa2e" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.792663 4869 generic.go:334] "Generic (PLEG): container finished" podID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerID="c37c781ec6bbb85eee14167c524d88c4bc0c5851de4283fda9feb6b69d175421" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.793149 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerDied","Data":"d6f67b2e1962982646c99be9310fe46368582436c0f28ba1d79bd9af395475fc"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.793192 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerDied","Data":"27b5ea189bd4822ad549497c90e0af47c89f4bcaf3a147f407784ea7d6d2c6c9"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.793207 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerDied","Data":"2df8b1eba74a41c2b063c8fbcf197a06c9966c6a5ba096d492ead9e41ab7aa2e"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.793217 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerDied","Data":"c37c781ec6bbb85eee14167c524d88c4bc0c5851de4283fda9feb6b69d175421"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.793226 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b94e67f1-cfa7-4470-96ad-440a78a7707e","Type":"ContainerDied","Data":"a40ff370ee160cb0dc495721894588c50dc0bcf1dc3537808526a9e3ba9be1b8"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.793236 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a40ff370ee160cb0dc495721894588c50dc0bcf1dc3537808526a9e3ba9be1b8" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.796618 4869 generic.go:334] "Generic (PLEG): container finished" podID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerID="75cb85d47c4f23763e64b6970bb9222234b6d481a8bbac78888a76d4dd1f8613" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.796645 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ae8a334-b758-420e-8aae-a3f6437f9816","Type":"ContainerDied","Data":"75cb85d47c4f23763e64b6970bb9222234b6d481a8bbac78888a76d4dd1f8613"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.796883 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ae8a334-b758-420e-8aae-a3f6437f9816","Type":"ContainerDied","Data":"d2cd7dce626a347e3be5349e2b82b8ec5b0a9fe93254bd37de31e53fb8fdb755"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.796970 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2cd7dce626a347e3be5349e2b82b8ec5b0a9fe93254bd37de31e53fb8fdb755" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.798575 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-644f9f48bf-ccrr2"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.802152 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65d95395-5aea-4546-b12a-ec8ce58ec704","Type":"ContainerDied","Data":"285bbd2e68eb2b69460784c677c91f293ed93525c17faea8c5f055f2b074fdbc"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.802194 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.804437 4869 generic.go:334] "Generic (PLEG): container finished" podID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerID="6f58056d40518f7f08d5b89fddc6140fcb4975e4b4047401e6bcc1e12b2f6a6f" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.804518 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec34c29c-665f-465a-99d0-c342aca2cf14","Type":"ContainerDied","Data":"6f58056d40518f7f08d5b89fddc6140fcb4975e4b4047401e6bcc1e12b2f6a6f"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.804948 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec34c29c-665f-465a-99d0-c342aca2cf14","Type":"ContainerDied","Data":"cf5240fbd9e31e278a86a8029cc43333cf57bd4501f7d0846360eeeec2d69c90"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.805027 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf5240fbd9e31e278a86a8029cc43333cf57bd4501f7d0846360eeeec2d69c90" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.806526 4869 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/root-account-create-update-zjpp9" secret="" err="secret \"galera-openstack-dockercfg-nmg6z\" not found" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.806569 4869 scope.go:117] "RemoveContainer" containerID="bf97f8988324fe855d157ff3d7e5e4eff62fa9c240599bdf67db6d8726b0793a" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.806936 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-zjpp9_openstack(f5b9f902-0038-4057-b1c2-66222926c1b5)\"" pod="openstack/root-account-create-update-zjpp9" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.811851 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.817558 4869 generic.go:334] "Generic (PLEG): container finished" podID="bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" containerID="6e3f92f98f69d9675547c36cfbba2bee734bf6e6196d8d62fa53a9ae6f597e60" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.817642 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b","Type":"ContainerDied","Data":"6e3f92f98f69d9675547c36cfbba2bee734bf6e6196d8d62fa53a9ae6f597e60"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.822600 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-644f9f48bf-ccrr2"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.825596 4869 generic.go:334] "Generic (PLEG): container finished" podID="d098b42f-f300-4308-93b0-fe2af785ce4c" containerID="b9690da5434b6d4146f8eab01da1057397213e30a8e072731befa7042dbba543" exitCode=2 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.825760 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d098b42f-f300-4308-93b0-fe2af785ce4c","Type":"ContainerDied","Data":"b9690da5434b6d4146f8eab01da1057397213e30a8e072731befa7042dbba543"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.825826 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d098b42f-f300-4308-93b0-fe2af785ce4c","Type":"ContainerDied","Data":"a88ac13c5eaf94c8b1980d72f0808839522a46283692e4ed1e72cc48d45196c4"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.825841 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a88ac13c5eaf94c8b1980d72f0808839522a46283692e4ed1e72cc48d45196c4" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.831674 4869 generic.go:334] "Generic (PLEG): container finished" podID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerID="f5b28fa841921137066788f807369cfb234084509157d818ea001da022898ab8" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.831781 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d945c4ca-288d-4e49-9048-b66894b7e97f","Type":"ContainerDied","Data":"f5b28fa841921137066788f807369cfb234084509157d818ea001da022898ab8"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.834846 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-86a6-account-create-update-ch52c"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.835516 4869 generic.go:334] "Generic (PLEG): container finished" podID="4f7d516c-1685-4033-891f-64008f56a468" containerID="dcbc7f36a647ccce1c4f9cac0b03cc3ed28f4b6d411a2de239d056af9abe8648" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.835555 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f7d516c-1685-4033-891f-64008f56a468","Type":"ContainerDied","Data":"dcbc7f36a647ccce1c4f9cac0b03cc3ed28f4b6d411a2de239d056af9abe8648"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.835571 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f7d516c-1685-4033-891f-64008f56a468","Type":"ContainerDied","Data":"6940607f480a945cedc3b03567a67b4d0c2cbf60d506d036567107979dcc4200"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.835583 4869 pod_container_deletor.go:80] "Container not found in 
pod's containers" containerID="6940607f480a945cedc3b03567a67b4d0c2cbf60d506d036567107979dcc4200" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.841023 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-86a6-account-create-update-ch52c"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.842272 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-587f84cd84-zqhxn" event={"ID":"323f79a2-48c7-4768-8707-23bc31755a50","Type":"ContainerDied","Data":"9e132362e5111dcd2f186519d8e686be77ef125488330c262e029b9f99383b2c"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.842420 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-587f84cd84-zqhxn" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.847416 4869 generic.go:334] "Generic (PLEG): container finished" podID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerID="92f17e6ef177f1d7c2a6e4d1a20a973d7e9064773ac8ae9ff622cf49961a940b" exitCode=0 Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.847482 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-9fb998c86-5qb5j" event={"ID":"74632136-6311-4daa-80c7-4c32c20d6a4a","Type":"ContainerDied","Data":"92f17e6ef177f1d7c2a6e4d1a20a973d7e9064773ac8ae9ff622cf49961a940b"} Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.849095 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.871612 4869 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.871727 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts podName:f5b9f902-0038-4057-b1c2-66222926c1b5 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:13.371690769 +0000 UTC m=+1323.921566835 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts") pod "root-account-create-update-zjpp9" (UID: "f5b9f902-0038-4057-b1c2-66222926c1b5") : configmap "openstack-scripts" not found Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.876666 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65d95395-5aea-4546-b12a-ec8ce58ec704-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.945112 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.948330 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.948583 4869 scope.go:117] "RemoveContainer" containerID="8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" Jan 30 11:16:12 crc kubenswrapper[4869]: E0130 11:16:12.949493 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b\": container with ID starting with 8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b not found: ID does not exist" containerID="8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.949528 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b"} err="failed to get container status \"8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b\": rpc error: code = NotFound desc = could not find container \"8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b\": container with ID starting with 8df9d8157f152c454f2d5faa24504c9f3a81b6b09571d72c7ebea240aaa6074b not found: ID does not exist" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.949556 4869 scope.go:117] "RemoveContainer" containerID="21d82ca792ac006fe155b7cbdd156ead01161c135d9e86487b0d642dfa345111" Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.966956 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-587f84cd84-zqhxn"] Jan 30 11:16:12 crc kubenswrapper[4869]: I0130 11:16:12.978900 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-587f84cd84-zqhxn"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.004496 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.012932 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.020288 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.023011 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.024419 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.032032 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.036582 4869 scope.go:117] "RemoveContainer" containerID="c1377e5a89b886398734afab4df613f251b21e463da188354d9a29304432a1e2" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.051315 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.055635 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.067621 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078552 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pp7g\" (UniqueName: \"kubernetes.io/projected/4f7d516c-1685-4033-891f-64008f56a468-kube-api-access-6pp7g\") pod \"4f7d516c-1685-4033-891f-64008f56a468\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078624 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-public-tls-certs\") pod \"4f7d516c-1685-4033-891f-64008f56a468\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078721 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-config\") pod \"d098b42f-f300-4308-93b0-fe2af785ce4c\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078758 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-combined-ca-bundle\") pod \"d098b42f-f300-4308-93b0-fe2af785ce4c\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078795 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pqpj\" (UniqueName: \"kubernetes.io/projected/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-api-access-4pqpj\") pod \"d098b42f-f300-4308-93b0-fe2af785ce4c\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078839 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-combined-ca-bundle\") pod \"4f7d516c-1685-4033-891f-64008f56a468\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078886 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-certs\") pod \"d098b42f-f300-4308-93b0-fe2af785ce4c\" (UID: \"d098b42f-f300-4308-93b0-fe2af785ce4c\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078919 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-config-data\") pod \"4f7d516c-1685-4033-891f-64008f56a468\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078954 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-internal-tls-certs\") pod \"4f7d516c-1685-4033-891f-64008f56a468\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.078991 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/4f7d516c-1685-4033-891f-64008f56a468-logs\") pod \"4f7d516c-1685-4033-891f-64008f56a468\" (UID: \"4f7d516c-1685-4033-891f-64008f56a468\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.084644 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f7d516c-1685-4033-891f-64008f56a468-logs" (OuterVolumeSpecName: "logs") pod "4f7d516c-1685-4033-891f-64008f56a468" (UID: "4f7d516c-1685-4033-891f-64008f56a468"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.097156 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7d516c-1685-4033-891f-64008f56a468-kube-api-access-6pp7g" (OuterVolumeSpecName: "kube-api-access-6pp7g") pod "4f7d516c-1685-4033-891f-64008f56a468" (UID: "4f7d516c-1685-4033-891f-64008f56a468"). InnerVolumeSpecName "kube-api-access-6pp7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.100944 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.101043 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-api-access-4pqpj" (OuterVolumeSpecName: "kube-api-access-4pqpj") pod "d098b42f-f300-4308-93b0-fe2af785ce4c" (UID: "d098b42f-f300-4308-93b0-fe2af785ce4c"). InnerVolumeSpecName "kube-api-access-4pqpj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.109100 4869 scope.go:117] "RemoveContainer" containerID="e9752fd0d18f235bdd601cdc37759bd12b8f72d28d609ba99e7c988552e2f109" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.109324 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.109507 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.112451 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.112593 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.118801 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.121066 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.148861 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-config-data" (OuterVolumeSpecName: "config-data") pod "4f7d516c-1685-4033-891f-64008f56a468" (UID: "4f7d516c-1685-4033-891f-64008f56a468"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.149010 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.149051 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.165610 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "d098b42f-f300-4308-93b0-fe2af785ce4c" (UID: "d098b42f-f300-4308-93b0-fe2af785ce4c"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.172943 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d098b42f-f300-4308-93b0-fe2af785ce4c" (UID: "d098b42f-f300-4308-93b0-fe2af785ce4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.178927 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f7d516c-1685-4033-891f-64008f56a468" (UID: "4f7d516c-1685-4033-891f-64008f56a468"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184486 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184546 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-config-data\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184580 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-httpd-run\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184617 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data\") pod \"74632136-6311-4daa-80c7-4c32c20d6a4a\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184644 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbt7l\" (UniqueName: \"kubernetes.io/projected/b94e67f1-cfa7-4470-96ad-440a78a7707e-kube-api-access-qbt7l\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184690 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-logs\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184744 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9dzr\" (UniqueName: \"kubernetes.io/projected/ec34c29c-665f-465a-99d0-c342aca2cf14-kube-api-access-t9dzr\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184776 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-sg-core-conf-yaml\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184803 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data-custom\") pod \"74632136-6311-4daa-80c7-4c32c20d6a4a\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184830 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-scripts\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " 
Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184855 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184882 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184909 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-run-httpd\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.184939 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-internal-tls-certs\") pod \"74632136-6311-4daa-80c7-4c32c20d6a4a\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185003 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-combined-ca-bundle\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185038 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-internal-tls-certs\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185088 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-config-data\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185112 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185138 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-public-tls-certs\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185169 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-combined-ca-bundle\") pod \"ec34c29c-665f-465a-99d0-c342aca2cf14\" (UID: \"ec34c29c-665f-465a-99d0-c342aca2cf14\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185192 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74632136-6311-4daa-80c7-4c32c20d6a4a-logs\") pod \"74632136-6311-4daa-80c7-4c32c20d6a4a\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185225 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-ceilometer-tls-certs\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185249 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-combined-ca-bundle\") pod \"74632136-6311-4daa-80c7-4c32c20d6a4a\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185269 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-combined-ca-bundle\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185295 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-log-httpd\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185326 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tknc\" (UniqueName: \"kubernetes.io/projected/74632136-6311-4daa-80c7-4c32c20d6a4a-kube-api-access-4tknc\") pod \"74632136-6311-4daa-80c7-4c32c20d6a4a\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185364 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-public-tls-certs\") pod \"74632136-6311-4daa-80c7-4c32c20d6a4a\" (UID: \"74632136-6311-4daa-80c7-4c32c20d6a4a\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185389 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-httpd-run\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185413 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-logs\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185448 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-scripts\") pod \"b94e67f1-cfa7-4470-96ad-440a78a7707e\" (UID: \"b94e67f1-cfa7-4470-96ad-440a78a7707e\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.185483 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-ssnv2\" (UniqueName: \"kubernetes.io/projected/2ae8a334-b758-420e-8aae-a3f6437f9816-kube-api-access-ssnv2\") pod \"2ae8a334-b758-420e-8aae-a3f6437f9816\" (UID: \"2ae8a334-b758-420e-8aae-a3f6437f9816\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.186038 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f7d516c-1685-4033-891f-64008f56a468-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.186058 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pp7g\" (UniqueName: \"kubernetes.io/projected/4f7d516c-1685-4033-891f-64008f56a468-kube-api-access-6pp7g\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.186074 4869 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.186087 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.186101 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pqpj\" (UniqueName: \"kubernetes.io/projected/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-api-access-4pqpj\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.186115 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.186126 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.187273 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.188419 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.188729 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-logs" (OuterVolumeSpecName: "logs") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.188858 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.188990 4869 scope.go:117] "RemoveContainer" containerID="c09c667d7f6ac623e362575529ea4aab1bd220f1c6756e69b8ca1ef9977354ae" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.189847 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74632136-6311-4daa-80c7-4c32c20d6a4a-logs" (OuterVolumeSpecName: "logs") pod "74632136-6311-4daa-80c7-4c32c20d6a4a" (UID: "74632136-6311-4daa-80c7-4c32c20d6a4a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.194956 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec34c29c-665f-465a-99d0-c342aca2cf14-kube-api-access-t9dzr" (OuterVolumeSpecName: "kube-api-access-t9dzr") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "kube-api-access-t9dzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.196525 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-logs" (OuterVolumeSpecName: "logs") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.197018 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.197171 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74632136-6311-4daa-80c7-4c32c20d6a4a-kube-api-access-4tknc" (OuterVolumeSpecName: "kube-api-access-4tknc") pod "74632136-6311-4daa-80c7-4c32c20d6a4a" (UID: "74632136-6311-4daa-80c7-4c32c20d6a4a"). InnerVolumeSpecName "kube-api-access-4tknc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.198992 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-scripts" (OuterVolumeSpecName: "scripts") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.200431 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ae8a334-b758-420e-8aae-a3f6437f9816-kube-api-access-ssnv2" (OuterVolumeSpecName: "kube-api-access-ssnv2") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "kube-api-access-ssnv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.203957 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.203970 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts" (OuterVolumeSpecName: "scripts") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.215293 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-scripts" (OuterVolumeSpecName: "scripts") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.215519 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.226804 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b94e67f1-cfa7-4470-96ad-440a78a7707e-kube-api-access-qbt7l" (OuterVolumeSpecName: "kube-api-access-qbt7l") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "kube-api-access-qbt7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.226917 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "74632136-6311-4daa-80c7-4c32c20d6a4a" (UID: "74632136-6311-4daa-80c7-4c32c20d6a4a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.226956 4869 scope.go:117] "RemoveContainer" containerID="80fd0e1a128b125455b0f3582efabeca4bc9e7c9682db967e54b192f2d8a8aa0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.245085 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.247054 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4f7d516c-1685-4033-891f-64008f56a468" (UID: "4f7d516c-1685-4033-891f-64008f56a468"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.261958 4869 scope.go:117] "RemoveContainer" containerID="30e76731331681ee54fc2b12405950fb45c6d15c16f3d7f16a01c29ca55daa7e" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.281699 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "d098b42f-f300-4308-93b0-fe2af785ce4c" (UID: "d098b42f-f300-4308-93b0-fe2af785ce4c"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.286975 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-combined-ca-bundle\") pod \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287133 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kolla-config\") pod \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287173 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-config-data\") pod \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287199 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-memcached-tls-certs\") pod \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287365 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4645t\" (UniqueName: \"kubernetes.io/projected/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kube-api-access-4645t\") pod \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\" (UID: \"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287818 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" (UID: "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287874 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbt7l\" (UniqueName: \"kubernetes.io/projected/b94e67f1-cfa7-4470-96ad-440a78a7707e-kube-api-access-qbt7l\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287888 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287898 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9dzr\" (UniqueName: \"kubernetes.io/projected/ec34c29c-665f-465a-99d0-c342aca2cf14-kube-api-access-t9dzr\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287907 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.287916 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.291945 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.291971 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292419 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292447 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292473 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292516 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74632136-6311-4daa-80c7-4c32c20d6a4a-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292529 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b94e67f1-cfa7-4470-96ad-440a78a7707e-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292542 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tknc\" (UniqueName: \"kubernetes.io/projected/74632136-6311-4daa-80c7-4c32c20d6a4a-kube-api-access-4tknc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292610 4869 
reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292629 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae8a334-b758-420e-8aae-a3f6437f9816-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292665 4869 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d098b42f-f300-4308-93b0-fe2af785ce4c-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292680 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292693 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssnv2\" (UniqueName: \"kubernetes.io/projected/2ae8a334-b758-420e-8aae-a3f6437f9816-kube-api-access-ssnv2\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.292731 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec34c29c-665f-465a-99d0-c342aca2cf14-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.296166 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-config-data" (OuterVolumeSpecName: "config-data") pod "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" (UID: "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.298697 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kube-api-access-4645t" (OuterVolumeSpecName: "kube-api-access-4645t") pod "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" (UID: "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b"). InnerVolumeSpecName "kube-api-access-4645t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.304154 4869 scope.go:117] "RemoveContainer" containerID="e05f057e1226a4ecd5362991160142f5969097d16d333ab689e4639d3978f4f8" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.308455 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4f7d516c-1685-4033-891f-64008f56a468" (UID: "4f7d516c-1685-4033-891f-64008f56a468"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.321887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.342494 4869 scope.go:117] "RemoveContainer" containerID="2d8084a7cade6c321549a9768cbcd158ea761ca605ab57b0333c081b4ad26652" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.344306 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.357065 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.373576 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" (UID: "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.379491 4869 scope.go:117] "RemoveContainer" containerID="f379626d704637c04266aaeddc7b56416c9104be8850dbe4859f262fe2550259" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.388768 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.393853 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx5k6\" (UniqueName: \"kubernetes.io/projected/d945c4ca-288d-4e49-9048-b66894b7e97f-kube-api-access-gx5k6\") pod \"d945c4ca-288d-4e49-9048-b66894b7e97f\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.394086 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945c4ca-288d-4e49-9048-b66894b7e97f-logs\") pod \"d945c4ca-288d-4e49-9048-b66894b7e97f\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.394404 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-nova-metadata-tls-certs\") pod \"d945c4ca-288d-4e49-9048-b66894b7e97f\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.394564 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-combined-ca-bundle\") pod \"d945c4ca-288d-4e49-9048-b66894b7e97f\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.394683 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-config-data\") pod \"d945c4ca-288d-4e49-9048-b66894b7e97f\" (UID: \"d945c4ca-288d-4e49-9048-b66894b7e97f\") " Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.396456 4869 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.396686 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.396824 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.396995 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.397099 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.397199 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4645t\" (UniqueName: \"kubernetes.io/projected/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-kube-api-access-4645t\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.397461 4869 reconciler_common.go:293] "Volume detached for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.397570 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7d516c-1685-4033-891f-64008f56a468-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.397810 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.398075 4869 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.398566 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts podName:f5b9f902-0038-4057-b1c2-66222926c1b5 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:14.398543904 +0000 UTC m=+1324.948420030 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts") pod "root-account-create-update-zjpp9" (UID: "f5b9f902-0038-4057-b1c2-66222926c1b5") : configmap "openstack-scripts" not found Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.407075 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d945c4ca-288d-4e49-9048-b66894b7e97f-logs" (OuterVolumeSpecName: "logs") pod "d945c4ca-288d-4e49-9048-b66894b7e97f" (UID: "d945c4ca-288d-4e49-9048-b66894b7e97f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.414666 4869 scope.go:117] "RemoveContainer" containerID="d02ef15f5ba93cc3d7b5586f76cca1e5e8d3253af837813c4d8c7db13197b4d6" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.416594 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.416762 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d945c4ca-288d-4e49-9048-b66894b7e97f-kube-api-access-gx5k6" (OuterVolumeSpecName: "kube-api-access-gx5k6") pod "d945c4ca-288d-4e49-9048-b66894b7e97f" (UID: "d945c4ca-288d-4e49-9048-b66894b7e97f"). InnerVolumeSpecName "kube-api-access-gx5k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.420683 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.440588 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.445161 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74632136-6311-4daa-80c7-4c32c20d6a4a" (UID: "74632136-6311-4daa-80c7-4c32c20d6a4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.460093 4869 scope.go:117] "RemoveContainer" containerID="740a2738961798bcc0a9b14e30b355bacd116adc8c93775ec46f0050fa91974f" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.468066 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.468535 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data" (OuterVolumeSpecName: "config-data") pod "ec34c29c-665f-465a-99d0-c342aca2cf14" (UID: "ec34c29c-665f-465a-99d0-c342aca2cf14"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.477892 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data" (OuterVolumeSpecName: "config-data") pod "74632136-6311-4daa-80c7-4c32c20d6a4a" (UID: "74632136-6311-4daa-80c7-4c32c20d6a4a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.492593 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "74632136-6311-4daa-80c7-4c32c20d6a4a" (UID: "74632136-6311-4daa-80c7-4c32c20d6a4a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.494563 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "74632136-6311-4daa-80c7-4c32c20d6a4a" (UID: "74632136-6311-4daa-80c7-4c32c20d6a4a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.498897 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d945c4ca-288d-4e49-9048-b66894b7e97f" (UID: "d945c4ca-288d-4e49-9048-b66894b7e97f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.505928 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txzz\" (UniqueName: \"kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.505998 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts\") pod \"keystone-cd90-account-create-update-7lmjl\" (UID: \"5339c4ee-a589-4517-bdbc-98f4f5dbb356\") " pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506071 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506083 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506092 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx5k6\" (UniqueName: \"kubernetes.io/projected/d945c4ca-288d-4e49-9048-b66894b7e97f-kube-api-access-gx5k6\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506103 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506112 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945c4ca-288d-4e49-9048-b66894b7e97f-logs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506121 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506130 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506138 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.506147 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.516577 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ec34c29c-665f-465a-99d0-c342aca2cf14-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.516610 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74632136-6311-4daa-80c7-4c32c20d6a4a-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.516623 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.506438 4869 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.516747 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts podName:5339c4ee-a589-4517-bdbc-98f4f5dbb356 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:15.5167208 +0000 UTC m=+1326.066596866 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts") pod "keystone-cd90-account-create-update-7lmjl" (UID: "5339c4ee-a589-4517-bdbc-98f4f5dbb356") : configmap "openstack-scripts" not found Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.510970 4869 projected.go:194] Error preparing data for projected volume kube-api-access-9txzz for pod openstack/keystone-cd90-account-create-update-7lmjl: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.517290 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz podName:5339c4ee-a589-4517-bdbc-98f4f5dbb356 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:15.517230255 +0000 UTC m=+1326.067106571 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-9txzz" (UniqueName: "kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz") pod "keystone-cd90-account-create-update-7lmjl" (UID: "5339c4ee-a589-4517-bdbc-98f4f5dbb356") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.548613 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.552097 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d945c4ca-288d-4e49-9048-b66894b7e97f" (UID: "d945c4ca-288d-4e49-9048-b66894b7e97f"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.585204 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-config-data" (OuterVolumeSpecName: "config-data") pod "d945c4ca-288d-4e49-9048-b66894b7e97f" (UID: "d945c4ca-288d-4e49-9048-b66894b7e97f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.595893 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-config-data" (OuterVolumeSpecName: "config-data") pod "2ae8a334-b758-420e-8aae-a3f6437f9816" (UID: "2ae8a334-b758-420e-8aae-a3f6437f9816"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.609884 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" (UID: "bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.610863 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-config-data" (OuterVolumeSpecName: "config-data") pod "b94e67f1-cfa7-4470-96ad-440a78a7707e" (UID: "b94e67f1-cfa7-4470-96ad-440a78a7707e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.624026 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.624072 4869 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.624119 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae8a334-b758-420e-8aae-a3f6437f9816-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.624131 4869 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b94e67f1-cfa7-4470-96ad-440a78a7707e-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.624142 4869 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.624152 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945c4ca-288d-4e49-9048-b66894b7e97f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.670608 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc 
= command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.675094 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.676610 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.676694 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="ovn-northd" Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.726153 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.726403 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data podName:4d1e4183-a136-428f-9bd8-e857a603da8f nodeName:}" failed. No retries permitted until 2026-01-30 11:16:21.726383314 +0000 UTC m=+1332.276259380 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data") pod "rabbitmq-server-0" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f") : configmap "rabbitmq-config-data" not found Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.763363 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-gm6nb" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" probeResult="failure" output="command timed out" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.796386 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-gm6nb" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" probeResult="failure" output=< Jan 30 11:16:13 crc kubenswrapper[4869]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Jan 30 11:16:13 crc kubenswrapper[4869]: > Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.859738 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.860327 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b","Type":"ContainerDied","Data":"0062f25b9c2855632489f4d07e01ff1e11614a600b576496c0f151bc82a1c9ba"} Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.860374 4869 scope.go:117] "RemoveContainer" containerID="6e3f92f98f69d9675547c36cfbba2bee734bf6e6196d8d62fa53a9ae6f597e60" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.869557 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-9fb998c86-5qb5j" event={"ID":"74632136-6311-4daa-80c7-4c32c20d6a4a","Type":"ContainerDied","Data":"35003dedbc247aed0f9c674563b373d14134f54452e5ef2576060b1927a3c1e7"} Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.869693 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-9fb998c86-5qb5j" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.880532 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d945c4ca-288d-4e49-9048-b66894b7e97f","Type":"ContainerDied","Data":"cae39af33b79d68b57bb61d4f9858146a971c206a9ac2deefec629e83bda6d12"} Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.880626 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.903417 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.906722 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.907014 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.907140 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.907272 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.907299 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cd90-account-create-update-7lmjl" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.921725 4869 scope.go:117] "RemoveContainer" containerID="92f17e6ef177f1d7c2a6e4d1a20a973d7e9064773ac8ae9ff622cf49961a940b" Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.929095 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.935777 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.942151 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-9fb998c86-5qb5j"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.954760 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-9fb998c86-5qb5j"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.958191 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.960256 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03 is running failed: container process not found" containerID="b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.960913 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03 is running failed: container process not found" containerID="b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.962804 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03 is running failed: container process not found" containerID="b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 30 11:16:13 crc kubenswrapper[4869]: I0130 11:16:13.963066 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 11:16:13 crc kubenswrapper[4869]: E0130 11:16:13.962842 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03 is running failed: container process not found" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="34532f6a-b213-422d-8126-d74d95c32497" containerName="galera" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.121940 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.132458 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.144681 4869 scope.go:117] "RemoveContainer" 
containerID="5c3a077c91d9559f388be79a584f5f0cc2987551bdedbe1808365a487d0f5ea3" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.162349 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f954356-d9a2-4183-9033-adf859e722e4" path="/var/lib/kubelet/pods/0f954356-d9a2-4183-9033-adf859e722e4/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.163449 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="104ca851-1c21-41bd-8a92-423fdab83753" path="/var/lib/kubelet/pods/104ca851-1c21-41bd-8a92-423fdab83753/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.164294 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="323f79a2-48c7-4768-8707-23bc31755a50" path="/var/lib/kubelet/pods/323f79a2-48c7-4768-8707-23bc31755a50/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.169655 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45d8f6aa-887f-444b-81c8-7bf6c03993c9" path="/var/lib/kubelet/pods/45d8f6aa-887f-444b-81c8-7bf6c03993c9/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.170875 4869 scope.go:117] "RemoveContainer" containerID="f5b28fa841921137066788f807369cfb234084509157d818ea001da022898ab8" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.175728 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4618ceff-14a9-4866-aa22-e29767d8d7e4" path="/var/lib/kubelet/pods/4618ceff-14a9-4866-aa22-e29767d8d7e4/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.178019 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48318921-34ba-442b-b9f0-6f7057d5cdf5" path="/var/lib/kubelet/pods/48318921-34ba-442b-b9f0-6f7057d5cdf5/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.178492 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a07399e-d252-46b3-823d-3fabceb4b671" path="/var/lib/kubelet/pods/5a07399e-d252-46b3-823d-3fabceb4b671/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.179293 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" path="/var/lib/kubelet/pods/65d95395-5aea-4546-b12a-ec8ce58ec704/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.180400 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" path="/var/lib/kubelet/pods/74632136-6311-4daa-80c7-4c32c20d6a4a/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.183027 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9320a43-d34c-4f43-b304-8f6414a44b33" path="/var/lib/kubelet/pods/b9320a43-d34c-4f43-b304-8f6414a44b33/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.187301 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba3b2f08-608c-49db-b58c-f20480a51bba" path="/var/lib/kubelet/pods/ba3b2f08-608c-49db-b58c-f20480a51bba/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.189281 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" path="/var/lib/kubelet/pods/bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.201160 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d098b42f-f300-4308-93b0-fe2af785ce4c" path="/var/lib/kubelet/pods/d098b42f-f300-4308-93b0-fe2af785ce4c/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 
11:16:14.201778 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" path="/var/lib/kubelet/pods/d945c4ca-288d-4e49-9048-b66894b7e97f/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.203525 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f" path="/var/lib/kubelet/pods/e72fb5b4-6762-4a1b-aae6-f2cbf4b7f69f/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.204644 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8f9cd63-d585-4053-b25b-3c0947f43755" path="/var/lib/kubelet/pods/f8f9cd63-d585-4053-b25b-3c0947f43755/volumes" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.205214 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.205247 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.205261 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.205272 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.206955 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.228110 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.239663 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-cd90-account-create-update-7lmjl"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.246493 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-cd90-account-create-update-7lmjl"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.253892 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.260594 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.277781 4869 scope.go:117] "RemoveContainer" containerID="393a2e711fafbe8c8153de69713ef0feb5af6833a38be6f6203958c4d1bd909f" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.405816 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-zjpp9" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.437134 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5339c4ee-a589-4517-bdbc-98f4f5dbb356-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.437161 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9txzz\" (UniqueName: \"kubernetes.io/projected/5339c4ee-a589-4517-bdbc-98f4f5dbb356-kube-api-access-9txzz\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.437328 4869 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.437393 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts podName:f5b9f902-0038-4057-b1c2-66222926c1b5 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:16.437377089 +0000 UTC m=+1326.987253155 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts") pod "root-account-create-update-zjpp9" (UID: "f5b9f902-0038-4057-b1c2-66222926c1b5") : configmap "openstack-scripts" not found Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.485119 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.488342 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.513157 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.523241 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.523321 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" containerName="nova-cell1-conductor-conductor" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.538398 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv5c8\" (UniqueName: \"kubernetes.io/projected/f5b9f902-0038-4057-b1c2-66222926c1b5-kube-api-access-fv5c8\") pod \"f5b9f902-0038-4057-b1c2-66222926c1b5\" (UID: 
\"f5b9f902-0038-4057-b1c2-66222926c1b5\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.538511 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts\") pod \"f5b9f902-0038-4057-b1c2-66222926c1b5\" (UID: \"f5b9f902-0038-4057-b1c2-66222926c1b5\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.540099 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f5b9f902-0038-4057-b1c2-66222926c1b5" (UID: "f5b9f902-0038-4057-b1c2-66222926c1b5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.548867 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5b9f902-0038-4057-b1c2-66222926c1b5-kube-api-access-fv5c8" (OuterVolumeSpecName: "kube-api-access-fv5c8") pod "f5b9f902-0038-4057-b1c2-66222926c1b5" (UID: "f5b9f902-0038-4057-b1c2-66222926c1b5"). InnerVolumeSpecName "kube-api-access-fv5c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.639663 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsdtz\" (UniqueName: \"kubernetes.io/projected/34532f6a-b213-422d-8126-d74d95c32497-kube-api-access-xsdtz\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.639762 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-kolla-config\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.639864 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-combined-ca-bundle\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.639931 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.639972 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34532f6a-b213-422d-8126-d74d95c32497-config-data-generated\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.640041 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-config-data-default\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.640074 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-operator-scripts\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.640139 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-galera-tls-certs\") pod \"34532f6a-b213-422d-8126-d74d95c32497\" (UID: \"34532f6a-b213-422d-8126-d74d95c32497\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.640556 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5b9f902-0038-4057-b1c2-66222926c1b5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.640577 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv5c8\" (UniqueName: \"kubernetes.io/projected/f5b9f902-0038-4057-b1c2-66222926c1b5-kube-api-access-fv5c8\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.640723 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.641159 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.641173 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.641730 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34532f6a-b213-422d-8126-d74d95c32497-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.655618 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34532f6a-b213-422d-8126-d74d95c32497-kube-api-access-xsdtz" (OuterVolumeSpecName: "kube-api-access-xsdtz") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "kube-api-access-xsdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.666913 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.686110 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "mysql-db") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.703850 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "34532f6a-b213-422d-8126-d74d95c32497" (UID: "34532f6a-b213-422d-8126-d74d95c32497"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741775 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741822 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741856 4869 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741869 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsdtz\" (UniqueName: \"kubernetes.io/projected/34532f6a-b213-422d-8126-d74d95c32497-kube-api-access-xsdtz\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741885 4869 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34532f6a-b213-422d-8126-d74d95c32497-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741896 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34532f6a-b213-422d-8126-d74d95c32497-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741930 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.741943 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34532f6a-b213-422d-8126-d74d95c32497-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc 
Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.770175 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_493ac356-9bec-4285-850c-8e3c7739641e/ovn-northd/0.log" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.770319 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.777383 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.843609 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.906529 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.921570 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_493ac356-9bec-4285-850c-8e3c7739641e/ovn-northd/0.log" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.921628 4869 generic.go:334] "Generic (PLEG): container finished" podID="493ac356-9bec-4285-850c-8e3c7739641e" containerID="f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" exitCode=139 Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.921687 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"493ac356-9bec-4285-850c-8e3c7739641e","Type":"ContainerDied","Data":"f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5"} Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.921764 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"493ac356-9bec-4285-850c-8e3c7739641e","Type":"ContainerDied","Data":"e1a91e455ba523d7c830ca0009e54d665729b34b1a3bbc738c851fd1b7d766d6"} Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.921760 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.921795 4869 scope.go:117] "RemoveContainer" containerID="86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.923914 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zjpp9" event={"ID":"f5b9f902-0038-4057-b1c2-66222926c1b5","Type":"ContainerDied","Data":"2aa4014e3720acc1e48b4d2149c02f5fb50fbaf7ba54b5987c76d2a7e243a2e7"}
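
The "Generic (PLEG): container finished" and "SyncLoop (PLEG): event for pod ... ContainerDied" entries above come from kubelet's pod lifecycle event generator, which periodically relists containers from the runtime and diffs the fresh snapshot against its cached one to synthesize events (note the ovn-northd container exited with code 139, i.e. SIGSEGV). A toy illustration of that relist-and-diff idea follows; the types are hypothetical, and kubelet's actual pleg package is considerably more involved:

    // Toy relist-and-diff: emit ContainerDied for anything that went from
    // running to exited between two snapshots. Not kubelet's pleg package.
    package main

    import "fmt"

    type snapshot map[string]string // containerID -> "running" or "exited"

    func diff(old, cur snapshot) []string {
        var events []string
        for id, state := range cur {
            if old[id] == "running" && state == "exited" {
                events = append(events, "ContainerDied "+id)
            }
        }
        return events
    }

    func main() {
        old := snapshot{"f692aea75d83": "running"}
        cur := snapshot{"f692aea75d83": "exited"}
        fmt.Println(diff(old, cur)) // [ContainerDied f692aea75d83]
    }

Each synthesized event is then fed into the sync loop, which is why the PLEG lines are immediately followed by SyncLoop entries for the same pod.
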
Need to start a new one" pod="openstack/root-account-create-update-zjpp9" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.932471 4869 generic.go:334] "Generic (PLEG): container finished" podID="34532f6a-b213-422d-8126-d74d95c32497" containerID="b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" exitCode=0 Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.932544 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34532f6a-b213-422d-8126-d74d95c32497","Type":"ContainerDied","Data":"b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03"} Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.932574 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34532f6a-b213-422d-8126-d74d95c32497","Type":"ContainerDied","Data":"4651430c60e7f7405e043cbe802bff71dc174764942041ab23b0d2423d4ff3ba"} Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.932664 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.944385 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-ovn-northd-tls-certs\") pod \"493ac356-9bec-4285-850c-8e3c7739641e\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.944530 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f59r\" (UniqueName: \"kubernetes.io/projected/493ac356-9bec-4285-850c-8e3c7739641e-kube-api-access-7f59r\") pod \"493ac356-9bec-4285-850c-8e3c7739641e\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.944587 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-config\") pod \"493ac356-9bec-4285-850c-8e3c7739641e\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.944606 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-scripts\") pod \"493ac356-9bec-4285-850c-8e3c7739641e\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.944643 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-metrics-certs-tls-certs\") pod \"493ac356-9bec-4285-850c-8e3c7739641e\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.944663 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/493ac356-9bec-4285-850c-8e3c7739641e-ovn-rundir\") pod \"493ac356-9bec-4285-850c-8e3c7739641e\" (UID: \"493ac356-9bec-4285-850c-8e3c7739641e\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.944691 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-combined-ca-bundle\") pod \"493ac356-9bec-4285-850c-8e3c7739641e\" (UID: 
\"493ac356-9bec-4285-850c-8e3c7739641e\") " Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.945529 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-scripts" (OuterVolumeSpecName: "scripts") pod "493ac356-9bec-4285-850c-8e3c7739641e" (UID: "493ac356-9bec-4285-850c-8e3c7739641e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.945664 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-config" (OuterVolumeSpecName: "config") pod "493ac356-9bec-4285-850c-8e3c7739641e" (UID: "493ac356-9bec-4285-850c-8e3c7739641e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.947188 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/493ac356-9bec-4285-850c-8e3c7739641e-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "493ac356-9bec-4285-850c-8e3c7739641e" (UID: "493ac356-9bec-4285-850c-8e3c7739641e"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.951049 4869 generic.go:334] "Generic (PLEG): container finished" podID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerID="3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a" exitCode=0 Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.951157 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d1e4183-a136-428f-9bd8-e857a603da8f","Type":"ContainerDied","Data":"3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a"} Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.951192 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d1e4183-a136-428f-9bd8-e857a603da8f","Type":"ContainerDied","Data":"93214a0ee47d97a9edbed4bb8424a217ec2d4807de9d6b1d0c950cd38d0e3228"} Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.951280 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.951513 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/493ac356-9bec-4285-850c-8e3c7739641e-kube-api-access-7f59r" (OuterVolumeSpecName: "kube-api-access-7f59r") pod "493ac356-9bec-4285-850c-8e3c7739641e" (UID: "493ac356-9bec-4285-850c-8e3c7739641e"). InnerVolumeSpecName "kube-api-access-7f59r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.958016 4869 scope.go:117] "RemoveContainer" containerID="f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.968319 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-zjpp9"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.977015 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-zjpp9"] Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.995658 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "493ac356-9bec-4285-850c-8e3c7739641e" (UID: "493ac356-9bec-4285-850c-8e3c7739641e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.997422 4869 scope.go:117] "RemoveContainer" containerID="86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4" Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.997940 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4\": container with ID starting with 86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4 not found: ID does not exist" containerID="86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.998015 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4"} err="failed to get container status \"86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4\": rpc error: code = NotFound desc = could not find container \"86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4\": container with ID starting with 86b7a253a11330df89e12fcb2cd867c724eb1b70d728e6ac436f033ef2e552e4 not found: ID does not exist" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.998044 4869 scope.go:117] "RemoveContainer" containerID="f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" Jan 30 11:16:14 crc kubenswrapper[4869]: E0130 11:16:14.998398 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5\": container with ID starting with f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5 not found: ID does not exist" containerID="f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 11:16:14.998445 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5"} err="failed to get container status \"f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5\": rpc error: code = NotFound desc = could not find container \"f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5\": container with ID starting with f692aea75d830feaf6c4d4138b6ab879aba7ddaa94979a62ad5644fdd0c6ddf5 not found: ID does not exist" Jan 30 11:16:14 crc kubenswrapper[4869]: I0130 
11:16:14.998459 4869 scope.go:117] "RemoveContainer" containerID="bf97f8988324fe855d157ff3d7e5e4eff62fa9c240599bdf67db6d8726b0793a" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.000462 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.008628 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.017554 4869 scope.go:117] "RemoveContainer" containerID="b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.027873 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "493ac356-9bec-4285-850c-8e3c7739641e" (UID: "493ac356-9bec-4285-850c-8e3c7739641e"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.042577 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "493ac356-9bec-4285-850c-8e3c7739641e" (UID: "493ac356-9bec-4285-850c-8e3c7739641e"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.044139 4869 scope.go:117] "RemoveContainer" containerID="fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054537 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-plugins\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054602 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d1e4183-a136-428f-9bd8-e857a603da8f-erlang-cookie-secret\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054629 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054651 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-tls\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054697 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-plugins-conf\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054730 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-64b5r\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-kube-api-access-64b5r\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054780 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4d1e4183-a136-428f-9bd8-e857a603da8f-pod-info\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054810 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-server-conf\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054839 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-erlang-cookie\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054862 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.054889 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-confd\") pod \"4d1e4183-a136-428f-9bd8-e857a603da8f\" (UID: \"4d1e4183-a136-428f-9bd8-e857a603da8f\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055176 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f59r\" (UniqueName: \"kubernetes.io/projected/493ac356-9bec-4285-850c-8e3c7739641e-kube-api-access-7f59r\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055191 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055200 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/493ac356-9bec-4285-850c-8e3c7739641e-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055208 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055217 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/493ac356-9bec-4285-850c-8e3c7739641e-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055225 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055232 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/493ac356-9bec-4285-850c-8e3c7739641e-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.055633 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.060057 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d1e4183-a136-428f-9bd8-e857a603da8f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.060250 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.063303 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4d1e4183-a136-428f-9bd8-e857a603da8f-pod-info" (OuterVolumeSpecName: "pod-info") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.063457 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.063472 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-kube-api-access-64b5r" (OuterVolumeSpecName: "kube-api-access-64b5r") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "kube-api-access-64b5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.067741 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.070430 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.084611 4869 scope.go:117] "RemoveContainer" containerID="b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.085048 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03\": container with ID starting with b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03 not found: ID does not exist" containerID="b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.085098 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03"} err="failed to get container status \"b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03\": rpc error: code = NotFound desc = could not find container \"b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03\": container with ID starting with b0b2edafa16cdb86c907a900b6834383a3f251d3d25ba3b9fa14aabedce02a03 not found: ID does not exist" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.085128 4869 scope.go:117] "RemoveContainer" containerID="fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6" Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.085623 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6\": container with ID starting with fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6 not found: ID does not exist" containerID="fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.085655 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6"} err="failed to get container status \"fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6\": rpc error: code = NotFound desc = could not find container \"fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6\": container with ID starting with fcd0228d1f2d35a91d3fd73675fa6d1fade39ed016a110af5ddc57482eaff8f6 not found: ID does not exist" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.085676 4869 scope.go:117] "RemoveContainer" containerID="3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.088772 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data" (OuterVolumeSpecName: "config-data") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.102073 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-server-conf" (OuterVolumeSpecName: "server-conf") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.141244 4869 scope.go:117] "RemoveContainer" containerID="55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.146359 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4d1e4183-a136-428f-9bd8-e857a603da8f" (UID: "4d1e4183-a136-428f-9bd8-e857a603da8f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156826 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156850 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156859 4869 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d1e4183-a136-428f-9bd8-e857a603da8f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156888 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156897 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156905 4869 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156914 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64b5r\" (UniqueName: \"kubernetes.io/projected/4d1e4183-a136-428f-9bd8-e857a603da8f-kube-api-access-64b5r\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156923 4869 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4d1e4183-a136-428f-9bd8-e857a603da8f-pod-info\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156931 4869 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-server-conf\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: 
I0130 11:16:15.156939 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d1e4183-a136-428f-9bd8-e857a603da8f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.156947 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4d1e4183-a136-428f-9bd8-e857a603da8f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.173221 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.242624 4869 scope.go:117] "RemoveContainer" containerID="3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a" Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.243379 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a\": container with ID starting with 3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a not found: ID does not exist" containerID="3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.243416 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a"} err="failed to get container status \"3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a\": rpc error: code = NotFound desc = could not find container \"3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a\": container with ID starting with 3fc1c2d02d30cc153ef6285ef6f4336e2c23b7fb060a908a71d5a2f75ee1261a not found: ID does not exist" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.243440 4869 scope.go:117] "RemoveContainer" containerID="55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9" Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.243683 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9\": container with ID starting with 55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9 not found: ID does not exist" containerID="55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.243716 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9"} err="failed to get container status \"55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9\": rpc error: code = NotFound desc = could not find container \"55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9\": container with ID starting with 55eceea13613475cd0e0edba738b2fea582c353eb4608bd2ab58553bd72ccab9 not found: ID does not exist" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.258940 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
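
The paired "RemoveContainer" / "ContainerStatus from runtime service failed ... NotFound" entries above are a benign race: the container had already been removed by the time the status lookup ran, so "DeleteContainer returned error" is informational and cleanup proceeds. The standard way to keep such deletion idempotent is to treat a gRPC NotFound from the runtime as success, sketched below (the Runtime interface and fakeRuntime are illustrative stand-ins, not CRI types):

    // Illustrative only: "already gone" counts as removed, so repeated
    // cleanup passes like the ones in this log stay idempotent.
    package main

    import (
        "context"
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    type Runtime interface {
        RemoveContainer(ctx context.Context, id string) error
    }

    func removeIfPresent(ctx context.Context, rt Runtime, id string) error {
        err := rt.RemoveContainer(ctx, id)
        if err == nil || status.Code(err) == codes.NotFound {
            return nil // missing container: nothing left to delete
        }
        return fmt.Errorf("removing container %s: %w", id, err)
    }

    type fakeRuntime struct{}

    func (fakeRuntime) RemoveContainer(_ context.Context, id string) error {
        return status.Error(codes.NotFound, "could not find container "+id)
    }

    func main() {
        err := removeIfPresent(context.Background(), fakeRuntime{}, "86b7a253a113")
        fmt.Println(err) // <nil>: NotFound was swallowed
    }
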
DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.268644 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.309164 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.328240 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.360883 4869 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.360976 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data podName:15b1a123-3831-4fa6-bc52-3f0cf30953f9 nodeName:}" failed. No retries permitted until 2026-01-30 11:16:23.360956202 +0000 UTC m=+1333.910832268 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9") : configmap "rabbitmq-cell1-config-data" not found Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.399392 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.563835 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-public-tls-certs\") pod \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.563928 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-combined-ca-bundle\") pod \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.563979 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-internal-tls-certs\") pod \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.563999 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-scripts\") pod \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.564025 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-config-data\") pod \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.564044 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-fernet-keys\") pod 
\"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.564075 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-credential-keys\") pod \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.564119 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t99ck\" (UniqueName: \"kubernetes.io/projected/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-kube-api-access-t99ck\") pod \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\" (UID: \"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.571011 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-kube-api-access-t99ck" (OuterVolumeSpecName: "kube-api-access-t99ck") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "kube-api-access-t99ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.574847 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-scripts" (OuterVolumeSpecName: "scripts") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.580953 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.586632 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.615960 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.616862 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.622122 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-config-data" (OuterVolumeSpecName: "config-data") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.629103 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" (UID: "dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666204 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t99ck\" (UniqueName: \"kubernetes.io/projected/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-kube-api-access-t99ck\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666256 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666266 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666275 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666285 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666294 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666302 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.666324 4869 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.709235 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.868904 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-server-conf\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.868997 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.869121 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-tls\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.869236 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-plugins-conf\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.870118 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15b1a123-3831-4fa6-bc52-3f0cf30953f9-pod-info\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.870155 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15b1a123-3831-4fa6-bc52-3f0cf30953f9-erlang-cookie-secret\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.870185 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.870223 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-plugins\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.870247 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svbhm\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-kube-api-access-svbhm\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.870674 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-confd\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: 
\"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.870777 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-erlang-cookie\") pod \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\" (UID: \"15b1a123-3831-4fa6-bc52-3f0cf30953f9\") " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.871923 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.873857 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.874208 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.874521 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b1a123-3831-4fa6-bc52-3f0cf30953f9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.875049 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.875601 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-kube-api-access-svbhm" (OuterVolumeSpecName: "kube-api-access-svbhm") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "kube-api-access-svbhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.877880 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/15b1a123-3831-4fa6-bc52-3f0cf30953f9-pod-info" (OuterVolumeSpecName: "pod-info") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.890206 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data" (OuterVolumeSpecName: "config-data") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.891246 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.908547 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-server-conf" (OuterVolumeSpecName: "server-conf") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.954051 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "15b1a123-3831-4fa6-bc52-3f0cf30953f9" (UID: "15b1a123-3831-4fa6-bc52-3f0cf30953f9"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.954330 4869 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 30 11:16:15 crc kubenswrapper[4869]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-30T11:16:08Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 30 11:16:15 crc kubenswrapper[4869]: /etc/init.d/functions: line 589: 400 Alarm clock "$@" Jan 30 11:16:15 crc kubenswrapper[4869]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-gm6nb" message=< Jan 30 11:16:15 crc kubenswrapper[4869]: Exiting ovn-controller (1) [FAILED] Jan 30 11:16:15 crc kubenswrapper[4869]: Killing ovn-controller (1) [ OK ] Jan 30 11:16:15 crc kubenswrapper[4869]: Killing ovn-controller (1) with SIGKILL [ OK ] Jan 30 11:16:15 crc kubenswrapper[4869]: 2026-01-30T11:16:08Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 30 11:16:15 crc kubenswrapper[4869]: /etc/init.d/functions: line 589: 400 Alarm clock "$@" Jan 30 11:16:15 crc kubenswrapper[4869]: > Jan 30 11:16:15 crc kubenswrapper[4869]: E0130 11:16:15.954369 4869 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 30 11:16:15 crc kubenswrapper[4869]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-30T11:16:08Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 30 11:16:15 crc kubenswrapper[4869]: /etc/init.d/functions: line 589: 400 Alarm clock "$@" Jan 30 11:16:15 crc kubenswrapper[4869]: > pod="openstack/ovn-controller-gm6nb" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" 
containerID="cri-o://22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.954421 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-gm6nb" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" containerID="cri-o://22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea" gracePeriod=22 Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.969926 4869 generic.go:334] "Generic (PLEG): container finished" podID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerID="0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e" exitCode=0 Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.969988 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15b1a123-3831-4fa6-bc52-3f0cf30953f9","Type":"ContainerDied","Data":"0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e"} Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.970017 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15b1a123-3831-4fa6-bc52-3f0cf30953f9","Type":"ContainerDied","Data":"cfb68ec5584822a1531ad52242e67d1b565587f9a72b028a0705a3db2d003cf3"} Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.970033 4869 scope.go:117] "RemoveContainer" containerID="0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.970134 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972442 4869 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972481 4869 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15b1a123-3831-4fa6-bc52-3f0cf30953f9-pod-info\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972490 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972498 4869 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15b1a123-3831-4fa6-bc52-3f0cf30953f9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972510 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972519 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svbhm\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-kube-api-access-svbhm\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972529 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-confd\") on node \"crc\" DevicePath 
\"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972538 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972546 4869 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15b1a123-3831-4fa6-bc52-3f0cf30953f9-server-conf\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972580 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.972590 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/15b1a123-3831-4fa6-bc52-3f0cf30953f9-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.991238 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.991837 4869 generic.go:334] "Generic (PLEG): container finished" podID="dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" containerID="8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69" exitCode=0 Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.991890 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6879fcbdc7-xgzr6" event={"ID":"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1","Type":"ContainerDied","Data":"8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69"} Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.991924 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6879fcbdc7-xgzr6" event={"ID":"dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1","Type":"ContainerDied","Data":"14547de67c89fe61548feaf147a6440ef77b34ea2530d07ac2a91f821ffc1bc1"} Jan 30 11:16:15 crc kubenswrapper[4869]: I0130 11:16:15.992130 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6879fcbdc7-xgzr6" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.074292 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.082202 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.088994 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.098363 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6879fcbdc7-xgzr6"] Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.107920 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6879fcbdc7-xgzr6"] Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.112938 4869 scope.go:117] "RemoveContainer" containerID="ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.151669 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" path="/var/lib/kubelet/pods/15b1a123-3831-4fa6-bc52-3f0cf30953f9/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.152506 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" path="/var/lib/kubelet/pods/2ae8a334-b758-420e-8aae-a3f6437f9816/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.154589 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34532f6a-b213-422d-8126-d74d95c32497" path="/var/lib/kubelet/pods/34532f6a-b213-422d-8126-d74d95c32497/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.155874 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="493ac356-9bec-4285-850c-8e3c7739641e" path="/var/lib/kubelet/pods/493ac356-9bec-4285-850c-8e3c7739641e/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.157140 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" path="/var/lib/kubelet/pods/4d1e4183-a136-428f-9bd8-e857a603da8f/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.158850 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7d516c-1685-4033-891f-64008f56a468" path="/var/lib/kubelet/pods/4f7d516c-1685-4033-891f-64008f56a468/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.159786 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5339c4ee-a589-4517-bdbc-98f4f5dbb356" path="/var/lib/kubelet/pods/5339c4ee-a589-4517-bdbc-98f4f5dbb356/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.160580 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" path="/var/lib/kubelet/pods/b94e67f1-cfa7-4470-96ad-440a78a7707e/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.162296 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" path="/var/lib/kubelet/pods/dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.163089 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" path="/var/lib/kubelet/pods/ec34c29c-665f-465a-99d0-c342aca2cf14/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.164101 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5" path="/var/lib/kubelet/pods/f5b9f902-0038-4057-b1c2-66222926c1b5/volumes" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.176921 4869 scope.go:117] "RemoveContainer" containerID="0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e" Jan 30 11:16:16 crc kubenswrapper[4869]: E0130 11:16:16.179455 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e\": container with ID starting with 0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e not found: ID does not exist" containerID="0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.179527 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e"} err="failed to get container status \"0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e\": rpc error: code = NotFound desc = could not find container \"0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e\": container with ID starting with 0cb29fabcce8f4cf1036b218ffdfe4e6ecf114fa1c35dcd0949e31985511687e not found: ID does not exist" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.179561 4869 scope.go:117] "RemoveContainer" containerID="ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129" Jan 30 11:16:16 crc kubenswrapper[4869]: E0130 11:16:16.180783 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129\": container with ID starting with ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129 not found: ID does not exist" containerID="ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.180810 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129"} err="failed to get container status \"ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129\": rpc error: code = NotFound desc = could not find container \"ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129\": container with ID starting with ac6a69179427ff89823caab2c5058cec074bad55265a0d608c121c2402131129 not found: ID does not exist" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.180825 4869 scope.go:117] "RemoveContainer" containerID="8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.213887 4869 scope.go:117] "RemoveContainer" containerID="8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69" Jan 30 11:16:16 crc kubenswrapper[4869]: E0130 11:16:16.214406 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69\": container with ID starting with 
8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69 not found: ID does not exist" containerID="8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.214440 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69"} err="failed to get container status \"8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69\": rpc error: code = NotFound desc = could not find container \"8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69\": container with ID starting with 8f7118bc51e624fc83d241708fa7793b9fc18ea10bcad4284f453208f1ae3f69 not found: ID does not exist" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.321299 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-gm6nb_f7eb4552-ad08-470d-b4c5-63c937f11717/ovn-controller/0.log" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.321409 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gm6nb" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.466878 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.484584 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-combined-ca-bundle\") pod \"f7eb4552-ad08-470d-b4c5-63c937f11717\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.484685 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-log-ovn\") pod \"f7eb4552-ad08-470d-b4c5-63c937f11717\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.484857 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7eb4552-ad08-470d-b4c5-63c937f11717-scripts\") pod \"f7eb4552-ad08-470d-b4c5-63c937f11717\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.484915 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-ovn-controller-tls-certs\") pod \"f7eb4552-ad08-470d-b4c5-63c937f11717\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.485000 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run-ovn\") pod \"f7eb4552-ad08-470d-b4c5-63c937f11717\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.485026 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run\") pod \"f7eb4552-ad08-470d-b4c5-63c937f11717\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.485108 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-srpwq\" (UniqueName: \"kubernetes.io/projected/f7eb4552-ad08-470d-b4c5-63c937f11717-kube-api-access-srpwq\") pod \"f7eb4552-ad08-470d-b4c5-63c937f11717\" (UID: \"f7eb4552-ad08-470d-b4c5-63c937f11717\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.493982 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "f7eb4552-ad08-470d-b4c5-63c937f11717" (UID: "f7eb4552-ad08-470d-b4c5-63c937f11717"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.494043 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run" (OuterVolumeSpecName: "var-run") pod "f7eb4552-ad08-470d-b4c5-63c937f11717" (UID: "f7eb4552-ad08-470d-b4c5-63c937f11717"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.494095 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "f7eb4552-ad08-470d-b4c5-63c937f11717" (UID: "f7eb4552-ad08-470d-b4c5-63c937f11717"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.494753 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7eb4552-ad08-470d-b4c5-63c937f11717-kube-api-access-srpwq" (OuterVolumeSpecName: "kube-api-access-srpwq") pod "f7eb4552-ad08-470d-b4c5-63c937f11717" (UID: "f7eb4552-ad08-470d-b4c5-63c937f11717"). InnerVolumeSpecName "kube-api-access-srpwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.495786 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7eb4552-ad08-470d-b4c5-63c937f11717-scripts" (OuterVolumeSpecName: "scripts") pod "f7eb4552-ad08-470d-b4c5-63c937f11717" (UID: "f7eb4552-ad08-470d-b4c5-63c937f11717"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.519835 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7eb4552-ad08-470d-b4c5-63c937f11717" (UID: "f7eb4552-ad08-470d-b4c5-63c937f11717"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.551592 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "f7eb4552-ad08-470d-b4c5-63c937f11717" (UID: "f7eb4552-ad08-470d-b4c5-63c937f11717"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586327 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data-custom\") pod \"161960a2-9537-4f72-913b-54b23f2b4be7\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586394 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxwkn\" (UniqueName: \"kubernetes.io/projected/161960a2-9537-4f72-913b-54b23f2b4be7-kube-api-access-wxwkn\") pod \"161960a2-9537-4f72-913b-54b23f2b4be7\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586460 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161960a2-9537-4f72-913b-54b23f2b4be7-etc-machine-id\") pod \"161960a2-9537-4f72-913b-54b23f2b4be7\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586513 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data\") pod \"161960a2-9537-4f72-913b-54b23f2b4be7\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586535 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-scripts\") pod \"161960a2-9537-4f72-913b-54b23f2b4be7\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586565 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-combined-ca-bundle\") pod \"161960a2-9537-4f72-913b-54b23f2b4be7\" (UID: \"161960a2-9537-4f72-913b-54b23f2b4be7\") " Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586857 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srpwq\" (UniqueName: \"kubernetes.io/projected/f7eb4552-ad08-470d-b4c5-63c937f11717-kube-api-access-srpwq\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586872 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586882 4869 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586891 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7eb4552-ad08-470d-b4c5-63c937f11717-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586899 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7eb4552-ad08-470d-b4c5-63c937f11717-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc 
kubenswrapper[4869]: I0130 11:16:16.586908 4869 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586917 4869 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f7eb4552-ad08-470d-b4c5-63c937f11717-var-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.586861 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/161960a2-9537-4f72-913b-54b23f2b4be7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "161960a2-9537-4f72-913b-54b23f2b4be7" (UID: "161960a2-9537-4f72-913b-54b23f2b4be7"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.603881 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "161960a2-9537-4f72-913b-54b23f2b4be7" (UID: "161960a2-9537-4f72-913b-54b23f2b4be7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.625637 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-scripts" (OuterVolumeSpecName: "scripts") pod "161960a2-9537-4f72-913b-54b23f2b4be7" (UID: "161960a2-9537-4f72-913b-54b23f2b4be7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.626887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/161960a2-9537-4f72-913b-54b23f2b4be7-kube-api-access-wxwkn" (OuterVolumeSpecName: "kube-api-access-wxwkn") pod "161960a2-9537-4f72-913b-54b23f2b4be7" (UID: "161960a2-9537-4f72-913b-54b23f2b4be7"). InnerVolumeSpecName "kube-api-access-wxwkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.653347 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "161960a2-9537-4f72-913b-54b23f2b4be7" (UID: "161960a2-9537-4f72-913b-54b23f2b4be7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.690636 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161960a2-9537-4f72-913b-54b23f2b4be7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.690672 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.690681 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.690691 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.690699 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxwkn\" (UniqueName: \"kubernetes.io/projected/161960a2-9537-4f72-913b-54b23f2b4be7-kube-api-access-wxwkn\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.714959 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data" (OuterVolumeSpecName: "config-data") pod "161960a2-9537-4f72-913b-54b23f2b4be7" (UID: "161960a2-9537-4f72-913b-54b23f2b4be7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.792025 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161960a2-9537-4f72-913b-54b23f2b4be7-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:16 crc kubenswrapper[4869]: I0130 11:16:16.997353 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.003501 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-gm6nb_f7eb4552-ad08-470d-b4c5-63c937f11717/ovn-controller/0.log" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.003557 4869 generic.go:334] "Generic (PLEG): container finished" podID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerID="22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea" exitCode=137 Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.003634 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gm6nb" event={"ID":"f7eb4552-ad08-470d-b4c5-63c937f11717","Type":"ContainerDied","Data":"22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea"} Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.003650 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-gm6nb" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.003671 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gm6nb" event={"ID":"f7eb4552-ad08-470d-b4c5-63c937f11717","Type":"ContainerDied","Data":"0ab9f05a65782af6017be7ca2a9d77511ebf3ff6b198307f140b667e485c331d"} Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.003691 4869 scope.go:117] "RemoveContainer" containerID="22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.008949 4869 generic.go:334] "Generic (PLEG): container finished" podID="161960a2-9537-4f72-913b-54b23f2b4be7" containerID="eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187" exitCode=0 Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.009023 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"161960a2-9537-4f72-913b-54b23f2b4be7","Type":"ContainerDied","Data":"eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187"} Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.009047 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"161960a2-9537-4f72-913b-54b23f2b4be7","Type":"ContainerDied","Data":"ab4e3e504b0ade65472a44b603f13be52f4879320a3ee1e562aad2f21f4df9bf"} Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.009136 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.016186 4869 generic.go:334] "Generic (PLEG): container finished" podID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" exitCode=0 Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.016285 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"439024e7-e7a3-42c4-b9a1-db6705ec33d2","Type":"ContainerDied","Data":"f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5"} Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.016318 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"439024e7-e7a3-42c4-b9a1-db6705ec33d2","Type":"ContainerDied","Data":"07bad36b9322f0b213ac681c1a99f42909f82f43825f8ddba17d3188127b9cb7"} Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.016378 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.039840 4869 scope.go:117] "RemoveContainer" containerID="22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea" Jan 30 11:16:17 crc kubenswrapper[4869]: E0130 11:16:17.040950 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea\": container with ID starting with 22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea not found: ID does not exist" containerID="22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.040996 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea"} err="failed to get container status \"22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea\": rpc error: code = NotFound desc = could not find container \"22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea\": container with ID starting with 22343d6d2159c59569cdcaeb63be8c1ccfd67f4ea5ab7aea714b76ecce386aea not found: ID does not exist" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.041020 4869 scope.go:117] "RemoveContainer" containerID="12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.054586 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gm6nb"] Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.074525 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-gm6nb"] Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.074623 4869 scope.go:117] "RemoveContainer" containerID="eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.077769 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.084443 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.095280 4869 scope.go:117] "RemoveContainer" containerID="12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.095697 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-combined-ca-bundle\") pod \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.095787 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pvvr\" (UniqueName: \"kubernetes.io/projected/439024e7-e7a3-42c4-b9a1-db6705ec33d2-kube-api-access-9pvvr\") pod \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " Jan 30 11:16:17 crc kubenswrapper[4869]: E0130 11:16:17.095767 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008\": container with ID starting with 12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008 not found: ID 
does not exist" containerID="12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.095829 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008"} err="failed to get container status \"12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008\": rpc error: code = NotFound desc = could not find container \"12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008\": container with ID starting with 12100bca04bc598644a6f4f21b6886c14fe32934d7b28768c12d907536df5008 not found: ID does not exist" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.095855 4869 scope.go:117] "RemoveContainer" containerID="eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.095856 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-config-data\") pod \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\" (UID: \"439024e7-e7a3-42c4-b9a1-db6705ec33d2\") " Jan 30 11:16:17 crc kubenswrapper[4869]: E0130 11:16:17.096621 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187\": container with ID starting with eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187 not found: ID does not exist" containerID="eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.096745 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187"} err="failed to get container status \"eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187\": rpc error: code = NotFound desc = could not find container \"eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187\": container with ID starting with eb26d980e7e5df4635ca9343e2f611e5ce78edd230c1081616317f662f549187 not found: ID does not exist" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.096770 4869 scope.go:117] "RemoveContainer" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.100557 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/439024e7-e7a3-42c4-b9a1-db6705ec33d2-kube-api-access-9pvvr" (OuterVolumeSpecName: "kube-api-access-9pvvr") pod "439024e7-e7a3-42c4-b9a1-db6705ec33d2" (UID: "439024e7-e7a3-42c4-b9a1-db6705ec33d2"). InnerVolumeSpecName "kube-api-access-9pvvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.116761 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-config-data" (OuterVolumeSpecName: "config-data") pod "439024e7-e7a3-42c4-b9a1-db6705ec33d2" (UID: "439024e7-e7a3-42c4-b9a1-db6705ec33d2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.117934 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "439024e7-e7a3-42c4-b9a1-db6705ec33d2" (UID: "439024e7-e7a3-42c4-b9a1-db6705ec33d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.118524 4869 scope.go:117] "RemoveContainer" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" Jan 30 11:16:17 crc kubenswrapper[4869]: E0130 11:16:17.119702 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5\": container with ID starting with f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5 not found: ID does not exist" containerID="f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.119753 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5"} err="failed to get container status \"f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5\": rpc error: code = NotFound desc = could not find container \"f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5\": container with ID starting with f1f67a718d23c87ea020c3a69713e73dad35f9fc50127281c32e30e68f5980e5 not found: ID does not exist" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.197915 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.197982 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/439024e7-e7a3-42c4-b9a1-db6705ec33d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.197993 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pvvr\" (UniqueName: \"kubernetes.io/projected/439024e7-e7a3-42c4-b9a1-db6705ec33d2-kube-api-access-9pvvr\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.348757 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 11:16:17 crc kubenswrapper[4869]: I0130 11:16:17.354071 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.096199 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.096557 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if 
PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.096899 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.096949 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.097020 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.098984 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.100443 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:18 crc kubenswrapper[4869]: E0130 11:16:18.100476 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:16:18 crc kubenswrapper[4869]: I0130 11:16:18.148865 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" path="/var/lib/kubelet/pods/161960a2-9537-4f72-913b-54b23f2b4be7/volumes" Jan 30 11:16:18 crc kubenswrapper[4869]: I0130 11:16:18.149453 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" path="/var/lib/kubelet/pods/439024e7-e7a3-42c4-b9a1-db6705ec33d2/volumes" Jan 30 11:16:18 crc kubenswrapper[4869]: I0130 11:16:18.150110 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" path="/var/lib/kubelet/pods/f7eb4552-ad08-470d-b4c5-63c937f11717/volumes" Jan 30 11:16:23 crc 
kubenswrapper[4869]: E0130 11:16:23.095093 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:23 crc kubenswrapper[4869]: E0130 11:16:23.096028 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:23 crc kubenswrapper[4869]: E0130 11:16:23.096135 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:23 crc kubenswrapper[4869]: E0130 11:16:23.096350 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:23 crc kubenswrapper[4869]: E0130 11:16:23.096386 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:16:23 crc kubenswrapper[4869]: E0130 11:16:23.097537 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:23 crc kubenswrapper[4869]: E0130 11:16:23.098861 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:23 crc kubenswrapper[4869]: E0130 11:16:23.099038 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.094852 4869 log.go:32] "ExecSync cmd from runtime service 
failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.095466 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.096324 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.096423 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.096477 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.098121 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.100695 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:28 crc kubenswrapper[4869]: E0130 11:16:28.100756 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.127986 4869 generic.go:334] "Generic (PLEG): container finished" podID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" 
containerID="a381b048e1d6b21ec22cf7214a1b8e39fa926bed88e85c72ebe8ab5f7bfa2c5b" exitCode=0 Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.128076 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df6cb98f-8s46w" event={"ID":"484ba6c3-20dc-4b27-b7f5-901eef0643a7","Type":"ContainerDied","Data":"a381b048e1d6b21ec22cf7214a1b8e39fa926bed88e85c72ebe8ab5f7bfa2c5b"} Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.206348 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.290799 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-public-tls-certs\") pod \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.290866 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-internal-tls-certs\") pod \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.290910 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-httpd-config\") pod \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.290976 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c2wl\" (UniqueName: \"kubernetes.io/projected/484ba6c3-20dc-4b27-b7f5-901eef0643a7-kube-api-access-5c2wl\") pod \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.291060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-ovndb-tls-certs\") pod \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.291083 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-combined-ca-bundle\") pod \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.291145 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-config\") pod \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\" (UID: \"484ba6c3-20dc-4b27-b7f5-901eef0643a7\") " Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.296792 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "484ba6c3-20dc-4b27-b7f5-901eef0643a7" (UID: "484ba6c3-20dc-4b27-b7f5-901eef0643a7"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.296878 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/484ba6c3-20dc-4b27-b7f5-901eef0643a7-kube-api-access-5c2wl" (OuterVolumeSpecName: "kube-api-access-5c2wl") pod "484ba6c3-20dc-4b27-b7f5-901eef0643a7" (UID: "484ba6c3-20dc-4b27-b7f5-901eef0643a7"). InnerVolumeSpecName "kube-api-access-5c2wl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.330275 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "484ba6c3-20dc-4b27-b7f5-901eef0643a7" (UID: "484ba6c3-20dc-4b27-b7f5-901eef0643a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.331649 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-config" (OuterVolumeSpecName: "config") pod "484ba6c3-20dc-4b27-b7f5-901eef0643a7" (UID: "484ba6c3-20dc-4b27-b7f5-901eef0643a7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.332289 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "484ba6c3-20dc-4b27-b7f5-901eef0643a7" (UID: "484ba6c3-20dc-4b27-b7f5-901eef0643a7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.332364 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "484ba6c3-20dc-4b27-b7f5-901eef0643a7" (UID: "484ba6c3-20dc-4b27-b7f5-901eef0643a7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.345730 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "484ba6c3-20dc-4b27-b7f5-901eef0643a7" (UID: "484ba6c3-20dc-4b27-b7f5-901eef0643a7"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.392934 4869 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.392970 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.392980 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.392993 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.393001 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.393009 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/484ba6c3-20dc-4b27-b7f5-901eef0643a7-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:30 crc kubenswrapper[4869]: I0130 11:16:30.393016 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c2wl\" (UniqueName: \"kubernetes.io/projected/484ba6c3-20dc-4b27-b7f5-901eef0643a7-kube-api-access-5c2wl\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:31 crc kubenswrapper[4869]: I0130 11:16:31.141685 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df6cb98f-8s46w" event={"ID":"484ba6c3-20dc-4b27-b7f5-901eef0643a7","Type":"ContainerDied","Data":"306ab68261b0c4da8e178e5d159061441e37413a08b256b72bbea8705ac02494"} Jan 30 11:16:31 crc kubenswrapper[4869]: I0130 11:16:31.141772 4869 scope.go:117] "RemoveContainer" containerID="a5570db7baf6da4c91df79cef463e37a3e6477f10e7ccd7079f3bf311e981158" Jan 30 11:16:31 crc kubenswrapper[4869]: I0130 11:16:31.141795 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-df6cb98f-8s46w" Jan 30 11:16:31 crc kubenswrapper[4869]: I0130 11:16:31.184856 4869 scope.go:117] "RemoveContainer" containerID="a381b048e1d6b21ec22cf7214a1b8e39fa926bed88e85c72ebe8ab5f7bfa2c5b" Jan 30 11:16:31 crc kubenswrapper[4869]: I0130 11:16:31.185613 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-df6cb98f-8s46w"] Jan 30 11:16:31 crc kubenswrapper[4869]: I0130 11:16:31.196453 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-df6cb98f-8s46w"] Jan 30 11:16:32 crc kubenswrapper[4869]: I0130 11:16:32.144313 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" path="/var/lib/kubelet/pods/484ba6c3-20dc-4b27-b7f5-901eef0643a7/volumes" Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.095041 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.095529 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.095896 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.095941 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.096174 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.097278 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.098807 
4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:33 crc kubenswrapper[4869]: E0130 11:16:33.098873 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:16:38 crc kubenswrapper[4869]: E0130 11:16:38.094576 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:38 crc kubenswrapper[4869]: E0130 11:16:38.094600 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021 is running failed: container process not found" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:38 crc kubenswrapper[4869]: E0130 11:16:38.095616 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:38 crc kubenswrapper[4869]: E0130 11:16:38.095748 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021 is running failed: container process not found" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:38 crc kubenswrapper[4869]: E0130 11:16:38.097585 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 30 11:16:38 crc kubenswrapper[4869]: E0130 11:16:38.097631 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:16:38 crc 
kubenswrapper[4869]: E0130 11:16:38.097773 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021 is running failed: container process not found" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 30 11:16:38 crc kubenswrapper[4869]: E0130 11:16:38.097829 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jfzdq" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.210696 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerID="e6f93331ee688f8b3d08b68521bc6870dc9ec3fe42c2459935fb484bbb47b43b" exitCode=137 Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.210813 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"e6f93331ee688f8b3d08b68521bc6870dc9ec3fe42c2459935fb484bbb47b43b"} Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.213074 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jfzdq_e4264086-12ed-4655-9657-14083653d56d/ovs-vswitchd/0.log" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.213858 4869 generic.go:334] "Generic (PLEG): container finished" podID="e4264086-12ed-4655-9657-14083653d56d" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" exitCode=137 Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.213896 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerDied","Data":"9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021"} Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.339901 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jfzdq_e4264086-12ed-4655-9657-14083653d56d/ovs-vswitchd/0.log" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.341066 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.356191 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510739 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-cache\") pod \"7b1a0e46-1fb4-4ab1-9417-cba939546529\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510796 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-run\") pod \"e4264086-12ed-4655-9657-14083653d56d\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510842 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1a0e46-1fb4-4ab1-9417-cba939546529-combined-ca-bundle\") pod \"7b1a0e46-1fb4-4ab1-9417-cba939546529\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510869 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") pod \"7b1a0e46-1fb4-4ab1-9417-cba939546529\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510885 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"7b1a0e46-1fb4-4ab1-9417-cba939546529\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510931 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4264086-12ed-4655-9657-14083653d56d-scripts\") pod \"e4264086-12ed-4655-9657-14083653d56d\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510954 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-etc-ovs\") pod \"e4264086-12ed-4655-9657-14083653d56d\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.510970 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-lib\") pod \"e4264086-12ed-4655-9657-14083653d56d\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511002 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-log\") pod \"e4264086-12ed-4655-9657-14083653d56d\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511020 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64p22\" (UniqueName: \"kubernetes.io/projected/e4264086-12ed-4655-9657-14083653d56d-kube-api-access-64p22\") pod \"e4264086-12ed-4655-9657-14083653d56d\" (UID: \"e4264086-12ed-4655-9657-14083653d56d\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511052 4869 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-lock\") pod \"7b1a0e46-1fb4-4ab1-9417-cba939546529\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511071 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zv5l\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-kube-api-access-8zv5l\") pod \"7b1a0e46-1fb4-4ab1-9417-cba939546529\" (UID: \"7b1a0e46-1fb4-4ab1-9417-cba939546529\") " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511483 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-log" (OuterVolumeSpecName: "var-log") pod "e4264086-12ed-4655-9657-14083653d56d" (UID: "e4264086-12ed-4655-9657-14083653d56d"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511534 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "e4264086-12ed-4655-9657-14083653d56d" (UID: "e4264086-12ed-4655-9657-14083653d56d"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511560 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-lib" (OuterVolumeSpecName: "var-lib") pod "e4264086-12ed-4655-9657-14083653d56d" (UID: "e4264086-12ed-4655-9657-14083653d56d"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.511916 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-run" (OuterVolumeSpecName: "var-run") pod "e4264086-12ed-4655-9657-14083653d56d" (UID: "e4264086-12ed-4655-9657-14083653d56d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.512222 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-lock" (OuterVolumeSpecName: "lock") pod "7b1a0e46-1fb4-4ab1-9417-cba939546529" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.512280 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-cache" (OuterVolumeSpecName: "cache") pod "7b1a0e46-1fb4-4ab1-9417-cba939546529" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.512886 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4264086-12ed-4655-9657-14083653d56d-scripts" (OuterVolumeSpecName: "scripts") pod "e4264086-12ed-4655-9657-14083653d56d" (UID: "e4264086-12ed-4655-9657-14083653d56d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.516624 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "7b1a0e46-1fb4-4ab1-9417-cba939546529" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.516640 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-kube-api-access-8zv5l" (OuterVolumeSpecName: "kube-api-access-8zv5l") pod "7b1a0e46-1fb4-4ab1-9417-cba939546529" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529"). InnerVolumeSpecName "kube-api-access-8zv5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.516997 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "swift") pod "7b1a0e46-1fb4-4ab1-9417-cba939546529" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.517131 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4264086-12ed-4655-9657-14083653d56d-kube-api-access-64p22" (OuterVolumeSpecName: "kube-api-access-64p22") pod "e4264086-12ed-4655-9657-14083653d56d" (UID: "e4264086-12ed-4655-9657-14083653d56d"). InnerVolumeSpecName "kube-api-access-64p22". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612629 4869 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612729 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612754 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4264086-12ed-4655-9657-14083653d56d-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612768 4869 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612780 4869 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-lib\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612815 4869 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-log\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612826 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64p22\" (UniqueName: 
\"kubernetes.io/projected/e4264086-12ed-4655-9657-14083653d56d-kube-api-access-64p22\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612835 4869 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-lock\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612843 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zv5l\" (UniqueName: \"kubernetes.io/projected/7b1a0e46-1fb4-4ab1-9417-cba939546529-kube-api-access-8zv5l\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612851 4869 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/7b1a0e46-1fb4-4ab1-9417-cba939546529-cache\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.612859 4869 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e4264086-12ed-4655-9657-14083653d56d-var-run\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.626996 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.714292 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.732917 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b1a0e46-1fb4-4ab1-9417-cba939546529-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b1a0e46-1fb4-4ab1-9417-cba939546529" (UID: "7b1a0e46-1fb4-4ab1-9417-cba939546529"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:16:38 crc kubenswrapper[4869]: I0130 11:16:38.815859 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1a0e46-1fb4-4ab1-9417-cba939546529-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.223200 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jfzdq_e4264086-12ed-4655-9657-14083653d56d/ovs-vswitchd/0.log" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.224581 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jfzdq" event={"ID":"e4264086-12ed-4655-9657-14083653d56d","Type":"ContainerDied","Data":"c12a8f1c5bed38e4e77664e98d4e2d1081bd60ab2a60dc82963d86b72ee2cbae"} Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.224639 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jfzdq" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.224654 4869 scope.go:117] "RemoveContainer" containerID="9361741e43c42081992e0c13715006076e25601472781c87f5b4a53ad68ee021" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.231682 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.231591 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7b1a0e46-1fb4-4ab1-9417-cba939546529","Type":"ContainerDied","Data":"0e8afe951cff3c900f198b5d6d99a90ac991981ccb9568db72fc68bca99867dc"} Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.256082 4869 scope.go:117] "RemoveContainer" containerID="805b1442b60f401ada6c6d3f531a2bb0d8e8642d4e690a843a85619f334e744c" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.267849 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-jfzdq"] Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.289751 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-jfzdq"] Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.290939 4869 scope.go:117] "RemoveContainer" containerID="bc346a21b4ea582edb1fae9adeb54b86c6065b26a7c7ba55773b410b593e821d" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.298518 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.305179 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.313930 4869 scope.go:117] "RemoveContainer" containerID="e6f93331ee688f8b3d08b68521bc6870dc9ec3fe42c2459935fb484bbb47b43b" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.338510 4869 scope.go:117] "RemoveContainer" containerID="65dd5e75cb2c7b06492f5521d624b519679d57118d8f7a7a48edaca957d584a1" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.356052 4869 scope.go:117] "RemoveContainer" containerID="13ba9f6f7912e14e6780c387ebc1a2663d74dda7b49e064fb9abd88ab5e57f99" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.371645 4869 scope.go:117] "RemoveContainer" containerID="f8504fd0ff5794faf6bacd1fef665e7a9cd6fedeedff24e1b282e17c56837bea" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.391772 4869 scope.go:117] "RemoveContainer" containerID="97e9034f3bb0c1adcd5ddc73729cfb8a5551a31bba376f2f3e02026b27ed1f80" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.408407 4869 scope.go:117] "RemoveContainer" containerID="6f9aa9147f317463724ec1dff3a40f0f2085d959d4963346ddc72ee9e85fd348" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.426328 4869 scope.go:117] "RemoveContainer" containerID="5ef059d18651368542240591ca6f5fe4c03b5e8aa1d605e33f266d5c65f87088" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.445995 4869 scope.go:117] "RemoveContainer" containerID="20275096c286a0ae8845e55c14e269921083b740de5bce384f7ce1e846435e9b" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.463513 4869 scope.go:117] "RemoveContainer" containerID="b439885a7cc9ae304f7f36de7bcb53459f7f5b2bd2fc41ec4f6c9ebd8922553f" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.479049 4869 scope.go:117] "RemoveContainer" containerID="e88f5b9e87740566349e22c0ad0a51c60c75ada4b7cc83a0dac45fffd0379d49" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.497115 4869 scope.go:117] "RemoveContainer" containerID="124390ed6584fb765f5eaac9acb1121748399b48696c737116ce08db4dd7bfb0" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.515977 4869 scope.go:117] "RemoveContainer" containerID="9062f1d85cf44cdab858a6bee6da1f5afe762771c26d3b6e53a3b618a6533e24" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.534595 4869 
scope.go:117] "RemoveContainer" containerID="59365e20746e6cb7518cf9197ed2d89e60bb59adac7d8138ec6dae90296333f2" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.551909 4869 scope.go:117] "RemoveContainer" containerID="f485c0dfd027365de561b952055dc9630a3eb742e8b7b5fbc78313ec86a5772e" Jan 30 11:16:39 crc kubenswrapper[4869]: I0130 11:16:39.576751 4869 scope.go:117] "RemoveContainer" containerID="e8224a2745bbe075dafb160da11e1dafa60447cd322ab3064698b3cb694f996d" Jan 30 11:16:40 crc kubenswrapper[4869]: I0130 11:16:40.143057 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" path="/var/lib/kubelet/pods/7b1a0e46-1fb4-4ab1-9417-cba939546529/volumes" Jan 30 11:16:40 crc kubenswrapper[4869]: I0130 11:16:40.144896 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4264086-12ed-4655-9657-14083653d56d" path="/var/lib/kubelet/pods/e4264086-12ed-4655-9657-14083653d56d/volumes" Jan 30 11:17:51 crc kubenswrapper[4869]: I0130 11:17:51.769211 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:17:51 crc kubenswrapper[4869]: I0130 11:17:51.769777 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.644091 4869 scope.go:117] "RemoveContainer" containerID="5412689076dd8c8ded30415d779a828810d54b049f71dbf07b75a1f99c3a2172" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.683339 4869 scope.go:117] "RemoveContainer" containerID="782d83205eace0b4d457f7add8f76d44c15bd4e233db6a43dd415a838973c7aa" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.708562 4869 scope.go:117] "RemoveContainer" containerID="ee92fad2a749ee8d2b76ed5e3e331a021f8ec879fcbe4e3a84b98b8a4e28bf2c" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.742991 4869 scope.go:117] "RemoveContainer" containerID="9050506c48afca5796f54434fc2e6a2df0c465c34f3a76fd4c4a1d174e1a986d" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.764808 4869 scope.go:117] "RemoveContainer" containerID="b1c838bbede5bc60bbbb80d7295da7bc641b43ddd0028dc86a903f5a803730ce" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.791983 4869 scope.go:117] "RemoveContainer" containerID="3e68c3724a9ab3f614c802fd8e24540459ddd7f937cae24b3480a37e300d0143" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.814932 4869 scope.go:117] "RemoveContainer" containerID="104a07f97fb6c7315162653073ff78d2591275c5c90c23f25e1e1792fa663e96" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.833334 4869 scope.go:117] "RemoveContainer" containerID="29dfd9cf1cbd1da829ec5f39674e87b03d77f8127500a99c65f6230faca4edba" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.857257 4869 scope.go:117] "RemoveContainer" containerID="1a899ce54a669e416e3efafc56286fab9b4ed702e1c2a4e1d21f491569dad96b" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.886828 4869 scope.go:117] "RemoveContainer" containerID="de0f4d059a4550335bce61729aa4f9cc7d282f7512964c918bbc226587317020" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 
11:18:12.905525 4869 scope.go:117] "RemoveContainer" containerID="7ee90326a4c74e4c96e0ebf6f541fb48ad1a22d16977717912a57be333a828f4" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.923022 4869 scope.go:117] "RemoveContainer" containerID="57ecd3103f17790e1e95ee6aac684d14a887854c2c1aa6515e8fe0bef4d0a5ba" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.944106 4869 scope.go:117] "RemoveContainer" containerID="70a7311ab926d3674361f8b0f2b836b0d2d1f74952207b1f7074f68482cfaa04" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.966626 4869 scope.go:117] "RemoveContainer" containerID="e03176144d3d2b9313e5fbda7411d00b5f5bb98b3f758dbfeda55c37b4b51417" Jan 30 11:18:12 crc kubenswrapper[4869]: I0130 11:18:12.987004 4869 scope.go:117] "RemoveContainer" containerID="075eb9430b2d0c542307c4443b0f873f573935612450db8c9b4d96f44753db02" Jan 30 11:18:13 crc kubenswrapper[4869]: I0130 11:18:13.029115 4869 scope.go:117] "RemoveContainer" containerID="0be86ef1bd8fec824fe2385731b416873ec19562cb9c12407f448b3913f77e3e" Jan 30 11:18:13 crc kubenswrapper[4869]: I0130 11:18:13.047060 4869 scope.go:117] "RemoveContainer" containerID="f0436d40c14323a1dde1389e6d1c8d2fc2eb8394c2ad72ae9f5ed44b19148575" Jan 30 11:18:13 crc kubenswrapper[4869]: I0130 11:18:13.063426 4869 scope.go:117] "RemoveContainer" containerID="e5ced6cbb69c7f8d3419ec3e36cf4944ee0d4070b2698a3b94fb7b264d9892dd" Jan 30 11:18:21 crc kubenswrapper[4869]: I0130 11:18:21.769500 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:18:21 crc kubenswrapper[4869]: I0130 11:18:21.770061 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:18:51 crc kubenswrapper[4869]: I0130 11:18:51.770070 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:18:51 crc kubenswrapper[4869]: I0130 11:18:51.770645 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:18:51 crc kubenswrapper[4869]: I0130 11:18:51.770688 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:18:51 crc kubenswrapper[4869]: I0130 11:18:51.771377 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cabcf877f44bd0de25c7db6df0dc9b22d4324a2881d9c97569218164d3a2997b"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:18:51 crc 
kubenswrapper[4869]: I0130 11:18:51.771434 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://cabcf877f44bd0de25c7db6df0dc9b22d4324a2881d9c97569218164d3a2997b" gracePeriod=600 Jan 30 11:18:52 crc kubenswrapper[4869]: I0130 11:18:52.345451 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="cabcf877f44bd0de25c7db6df0dc9b22d4324a2881d9c97569218164d3a2997b" exitCode=0 Jan 30 11:18:52 crc kubenswrapper[4869]: I0130 11:18:52.345829 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"cabcf877f44bd0de25c7db6df0dc9b22d4324a2881d9c97569218164d3a2997b"} Jan 30 11:18:52 crc kubenswrapper[4869]: I0130 11:18:52.345871 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"} Jan 30 11:18:52 crc kubenswrapper[4869]: I0130 11:18:52.345893 4869 scope.go:117] "RemoveContainer" containerID="2d30cb107250bb27f981051333540bce0f94a645c35535aa9330fd41a7dff2ba" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.284010 4869 scope.go:117] "RemoveContainer" containerID="10cf6b361a191a48180d5a15a376bed618b2ddcb19f98a9c3a51139c9385393c" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.331083 4869 scope.go:117] "RemoveContainer" containerID="f44e61413ed4598ca9cdf8a2649ba579d0a8e3d211a34b0fc3ad0dc71501c511" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.378534 4869 scope.go:117] "RemoveContainer" containerID="ee029ceb18de8c7ea264247d2be5d95f2b3fc59d8b8c4a57e75cfd5dadb30322" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.445424 4869 scope.go:117] "RemoveContainer" containerID="685feecd6eaaece16ff8456bc67ddc2bf170e620ad33736fe1c4a7ee80e38f46" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.486199 4869 scope.go:117] "RemoveContainer" containerID="066fc7f3790053de7cae6caa9f1d67b9a3ad3696f73c95279d935a734f52c6d1" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.518867 4869 scope.go:117] "RemoveContainer" containerID="f9e32cc6f22599826f2c7ba35fc0e547c20981a4190c6a91c483999252cd7e29" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.536477 4869 scope.go:117] "RemoveContainer" containerID="d6f7e90459e93a0c1cb0bd18e82b5a0a8e2bdb2ce7d8119dcb5ef5ced7f57ff7" Jan 30 11:19:13 crc kubenswrapper[4869]: I0130 11:19:13.562416 4869 scope.go:117] "RemoveContainer" containerID="9de643e0adb20ada99ce0f5134b6c4728242a55ce93d1ec0e43ce5db03b38852" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.715194 4869 scope.go:117] "RemoveContainer" containerID="ecbb01cd1bd2c82e2943bf2f2101e86a6978d1af67f8df56a057053148d323ab" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.737931 4869 scope.go:117] "RemoveContainer" containerID="1abc3e2a19875a5d076b39d5251995e37da6b95e6a34f3616e85a54e3f5d1c08" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.755277 4869 scope.go:117] "RemoveContainer" containerID="c8372e95ef80d324e58700717c686cf6517c90804f250faf3d73bbe912f0a6a3" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.781831 4869 scope.go:117] "RemoveContainer" 
containerID="ace5b38b794c9cefda59f56dfa4f520cafde0d2f1d086b9bf339aaf0082fc0ff" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.798183 4869 scope.go:117] "RemoveContainer" containerID="6ff5e8125956cbc55ddecf51fea9c1dd9fdc0fb71f0b06392215227e5d522a69" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.840261 4869 scope.go:117] "RemoveContainer" containerID="8ef795eee6cad526331d1ec40516e7e05de95f60e7c7819fd56bcb9dfd8c5e61" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.860851 4869 scope.go:117] "RemoveContainer" containerID="45865b93886b8857980054d339f929f43d0d91edc599cbbafd11a60132b05945" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.881947 4869 scope.go:117] "RemoveContainer" containerID="ec3119367a64d160d3d0df8968c569ef2ac842f41ee741d524838930b96fba12" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.917063 4869 scope.go:117] "RemoveContainer" containerID="75cb85d47c4f23763e64b6970bb9222234b6d481a8bbac78888a76d4dd1f8613" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.944354 4869 scope.go:117] "RemoveContainer" containerID="6f58056d40518f7f08d5b89fddc6140fcb4975e4b4047401e6bcc1e12b2f6a6f" Jan 30 11:20:13 crc kubenswrapper[4869]: I0130 11:20:13.960397 4869 scope.go:117] "RemoveContainer" containerID="15f42f64893266a96f35bcf2c3e762836f56e50d156b31125fffc23d316c68c5" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.405847 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gl6ts"] Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406775 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="openstack-network-exporter" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406789 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="openstack-network-exporter" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406797 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerName="setup-container" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406803 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerName="setup-container" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406846 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406853 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406863 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406868 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406877 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="probe" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406883 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="probe" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406902 4869 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="sg-core" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406909 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="sg-core" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406916 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-central-agent" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406922 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-central-agent" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406933 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-notification-agent" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406938 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-notification-agent" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406945 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="ovn-northd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406951 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="ovn-northd" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406957 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406963 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406974 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406979 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-api" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.406987 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.406994 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-log" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407005 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407011 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407019 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-expirer" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407025 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-expirer" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 
11:20:33.407034 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" containerName="keystone-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407040 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" containerName="keystone-api" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407047 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407053 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-server" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407063 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407070 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407078 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="rsync" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407083 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="rsync" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407089 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server-init" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407095 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server-init" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407101 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407131 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-log" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407248 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407260 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407266 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34532f6a-b213-422d-8126-d74d95c32497" containerName="galera" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407272 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="34532f6a-b213-422d-8126-d74d95c32497" containerName="galera" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407280 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407294 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407305 4869 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407311 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407320 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="swift-recon-cron" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407326 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="swift-recon-cron" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407337 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-updater" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407342 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-updater" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407349 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-metadata" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407355 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-metadata" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407364 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" containerName="nova-cell1-conductor-conductor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407370 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" containerName="nova-cell1-conductor-conductor" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407378 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerName="rabbitmq" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407384 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerName="rabbitmq" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407392 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407397 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-server" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407406 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="cinder-scheduler" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407411 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="cinder-scheduler" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407422 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407428 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-auditor" Jan 30 11:20:33 
crc kubenswrapper[4869]: E0130 11:20:33.407435 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407441 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-api" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407451 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5" containerName="mariadb-account-create-update" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407457 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5" containerName="mariadb-account-create-update" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407464 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407469 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-server" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407478 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerName="setup-container" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407484 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerName="setup-container" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407497 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-reaper" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407503 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-reaper" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407511 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407518 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407526 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407533 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-log" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407539 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d098b42f-f300-4308-93b0-fe2af785ce4c" containerName="kube-state-metrics" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407545 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d098b42f-f300-4308-93b0-fe2af785ce4c" containerName="kube-state-metrics" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407554 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407559 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" 
containerName="container-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407568 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407573 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407584 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerName="rabbitmq" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407591 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerName="rabbitmq" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407600 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407607 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407616 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" containerName="memcached" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407625 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" containerName="memcached" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407634 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407644 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407654 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407660 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407669 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407676 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407685 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5" containerName="mariadb-account-create-update" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407692 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5" containerName="mariadb-account-create-update" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407723 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="proxy-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407730 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="proxy-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407739 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407745 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407755 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34532f6a-b213-422d-8126-d74d95c32497" containerName="mysql-bootstrap" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407761 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="34532f6a-b213-422d-8126-d74d95c32497" containerName="mysql-bootstrap" Jan 30 11:20:33 crc kubenswrapper[4869]: E0130 11:20:33.407768 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-updater" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407773 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-updater" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407926 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfa9c8cb-c5b4-4112-ac55-e51ea9257a7b" containerName="memcached" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407934 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="cinder-scheduler" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407944 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbfbbf2e-d6ec-4a50-a7b6-13c8a671b1f1" containerName="keystone-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407951 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407962 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407972 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-reaper" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407979 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="rsync" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407988 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="161960a2-9537-4f72-913b-54b23f2b4be7" containerName="probe" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.407997 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408004 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408013 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7d516c-1685-4033-891f-64008f56a468" containerName="nova-api-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408019 4869 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-central-agent" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408029 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d95395-5aea-4546-b12a-ec8ce58ec704" containerName="cinder-api-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408037 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408044 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408050 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408058 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-updater" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408066 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-log" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408075 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="15b1a123-3831-4fa6-bc52-3f0cf30953f9" containerName="rabbitmq" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408084 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="34532f6a-b213-422d-8126-d74d95c32497" containerName="galera" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408091 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d1e4183-a136-428f-9bd8-e857a603da8f" containerName="rabbitmq" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408098 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408103 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-expirer" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408110 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="openstack-network-exporter" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408119 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-updater" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408126 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-replicator" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408137 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="493ac356-9bec-4285-850c-8e3c7739641e" containerName="ovn-northd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408145 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-api" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408153 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" 
containerName="swift-recon-cron" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408162 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec34c29c-665f-465a-99d0-c342aca2cf14" containerName="glance-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408168 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="account-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408175 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408184 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408193 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="439024e7-e7a3-42c4-b9a1-db6705ec33d2" containerName="nova-cell1-conductor-conductor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408200 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="container-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408207 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5b9f902-0038-4057-b1c2-66222926c1b5" containerName="mariadb-account-create-update" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408227 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7eb4552-ad08-470d-b4c5-63c937f11717" containerName="ovn-controller" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408236 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="sg-core" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408245 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="ceilometer-notification-agent" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408252 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovsdb-server" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408260 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ae8a334-b758-420e-8aae-a3f6437f9816" containerName="glance-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408268 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4264086-12ed-4655-9657-14083653d56d" containerName="ovs-vswitchd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408275 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1a0e46-1fb4-4ab1-9417-cba939546529" containerName="object-auditor" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408285 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d945c4ca-288d-4e49-9048-b66894b7e97f" containerName="nova-metadata-metadata" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408293 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b94e67f1-cfa7-4470-96ad-440a78a7707e" containerName="proxy-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408299 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="484ba6c3-20dc-4b27-b7f5-901eef0643a7" containerName="neutron-httpd" Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408314 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="74632136-6311-4daa-80c7-4c32c20d6a4a" containerName="barbican-api-log"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.408321 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d098b42f-f300-4308-93b0-fe2af785ce4c" containerName="kube-state-metrics"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.410815 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.424396 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gl6ts"]
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.536168 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-utilities\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.536219 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzf7t\" (UniqueName: \"kubernetes.io/projected/3e68c683-86bf-47fe-887f-289d72c93b1c-kube-api-access-hzf7t\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.536394 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-catalog-content\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.637863 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-catalog-content\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.637930 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-utilities\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.637967 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzf7t\" (UniqueName: \"kubernetes.io/projected/3e68c683-86bf-47fe-887f-289d72c93b1c-kube-api-access-hzf7t\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.638396 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-catalog-content\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.638692 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-utilities\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.669689 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzf7t\" (UniqueName: \"kubernetes.io/projected/3e68c683-86bf-47fe-887f-289d72c93b1c-kube-api-access-hzf7t\") pod \"certified-operators-gl6ts\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") " pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:33 crc kubenswrapper[4869]: I0130 11:20:33.741193 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:34 crc kubenswrapper[4869]: I0130 11:20:34.243151 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gl6ts"]
Jan 30 11:20:35 crc kubenswrapper[4869]: I0130 11:20:35.145292 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerID="b43ada421df947fac5ed4dc0172df2fec32456d868e31a3c8d678b4306be10dc" exitCode=0
Jan 30 11:20:35 crc kubenswrapper[4869]: I0130 11:20:35.145400 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gl6ts" event={"ID":"3e68c683-86bf-47fe-887f-289d72c93b1c","Type":"ContainerDied","Data":"b43ada421df947fac5ed4dc0172df2fec32456d868e31a3c8d678b4306be10dc"}
Jan 30 11:20:35 crc kubenswrapper[4869]: I0130 11:20:35.145612 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gl6ts" event={"ID":"3e68c683-86bf-47fe-887f-289d72c93b1c","Type":"ContainerStarted","Data":"0afb624ad113e3cf8fc0b1d98e0990463e26d6392182ea09eaa469935870a3a8"}
Jan 30 11:20:35 crc kubenswrapper[4869]: I0130 11:20:35.148496 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 30 11:20:36 crc kubenswrapper[4869]: I0130 11:20:36.154950 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerID="69701c87e5a2ab134cf47f619c097e3982e81ad7471a2b801a35ac6830df0090" exitCode=0
Jan 30 11:20:36 crc kubenswrapper[4869]: I0130 11:20:36.155195 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gl6ts" event={"ID":"3e68c683-86bf-47fe-887f-289d72c93b1c","Type":"ContainerDied","Data":"69701c87e5a2ab134cf47f619c097e3982e81ad7471a2b801a35ac6830df0090"}
Jan 30 11:20:37 crc kubenswrapper[4869]: I0130 11:20:37.167182 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gl6ts" event={"ID":"3e68c683-86bf-47fe-887f-289d72c93b1c","Type":"ContainerStarted","Data":"61f8b5a062a9eb5bc0fa15728aa4a3bf2efccb7852187ca7b77f3bfdc8df6ccc"}
Jan 30 11:20:37 crc kubenswrapper[4869]: I0130 11:20:37.187365 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gl6ts" podStartSLOduration=2.561574076 podStartE2EDuration="4.187340675s" podCreationTimestamp="2026-01-30 11:20:33 +0000 UTC" firstStartedPulling="2026-01-30 11:20:35.148159804 +0000 UTC m=+1585.698035870" lastFinishedPulling="2026-01-30 11:20:36.773926413 +0000 UTC m=+1587.323802469" observedRunningTime="2026-01-30 11:20:37.182375854 +0000 UTC m=+1587.732251920" watchObservedRunningTime="2026-01-30 11:20:37.187340675 +0000 UTC m=+1587.737216741"
Jan 30 11:20:43 crc kubenswrapper[4869]: I0130 11:20:43.741397 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:43 crc kubenswrapper[4869]: I0130 11:20:43.741747 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:43 crc kubenswrapper[4869]: I0130 11:20:43.785846 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.048650 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nngtl"]
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.050381 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.067028 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nngtl"]
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.190118 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-utilities\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.190258 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-catalog-content\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.190283 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftql2\" (UniqueName: \"kubernetes.io/projected/dd76d919-f21a-458d-9903-ea3180418753-kube-api-access-ftql2\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.260523 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.293139 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-utilities\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.293381 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-catalog-content\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.293408 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftql2\" (UniqueName: \"kubernetes.io/projected/dd76d919-f21a-458d-9903-ea3180418753-kube-api-access-ftql2\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.293785 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-utilities\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.293875 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-catalog-content\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.322107 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftql2\" (UniqueName: \"kubernetes.io/projected/dd76d919-f21a-458d-9903-ea3180418753-kube-api-access-ftql2\") pod \"redhat-marketplace-nngtl\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.373350 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nngtl"
Jan 30 11:20:44 crc kubenswrapper[4869]: I0130 11:20:44.801339 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nngtl"]
Jan 30 11:20:45 crc kubenswrapper[4869]: I0130 11:20:45.220389 4869 generic.go:334] "Generic (PLEG): container finished" podID="dd76d919-f21a-458d-9903-ea3180418753" containerID="f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450" exitCode=0
Jan 30 11:20:45 crc kubenswrapper[4869]: I0130 11:20:45.220490 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nngtl" event={"ID":"dd76d919-f21a-458d-9903-ea3180418753","Type":"ContainerDied","Data":"f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450"}
Jan 30 11:20:45 crc kubenswrapper[4869]: I0130 11:20:45.220540 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nngtl" event={"ID":"dd76d919-f21a-458d-9903-ea3180418753","Type":"ContainerStarted","Data":"72f1efbda42ad4173a07d3ef7ccd5739c38723117f9e5724d26c204dbe5eb24f"}
Jan 30 11:20:46 crc kubenswrapper[4869]: I0130 11:20:46.228610 4869 generic.go:334] "Generic (PLEG): container finished" podID="dd76d919-f21a-458d-9903-ea3180418753" containerID="333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5" exitCode=0
Jan 30 11:20:46 crc kubenswrapper[4869]: I0130 11:20:46.228751 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nngtl" event={"ID":"dd76d919-f21a-458d-9903-ea3180418753","Type":"ContainerDied","Data":"333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5"}
Jan 30 11:20:46 crc kubenswrapper[4869]: I0130 11:20:46.619800 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gl6ts"]
Jan 30 11:20:46 crc kubenswrapper[4869]: I0130 11:20:46.620072 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gl6ts" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="registry-server" containerID="cri-o://61f8b5a062a9eb5bc0fa15728aa4a3bf2efccb7852187ca7b77f3bfdc8df6ccc" gracePeriod=2
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.237471 4869 generic.go:334] "Generic (PLEG): container finished" podID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerID="61f8b5a062a9eb5bc0fa15728aa4a3bf2efccb7852187ca7b77f3bfdc8df6ccc" exitCode=0
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.237769 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gl6ts" event={"ID":"3e68c683-86bf-47fe-887f-289d72c93b1c","Type":"ContainerDied","Data":"61f8b5a062a9eb5bc0fa15728aa4a3bf2efccb7852187ca7b77f3bfdc8df6ccc"}
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.243019 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nngtl" event={"ID":"dd76d919-f21a-458d-9903-ea3180418753","Type":"ContainerStarted","Data":"1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da"}
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.263336 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nngtl" podStartSLOduration=1.894657783 podStartE2EDuration="3.263313567s" podCreationTimestamp="2026-01-30 11:20:44 +0000 UTC" firstStartedPulling="2026-01-30 11:20:45.222515672 +0000 UTC m=+1595.772391738" lastFinishedPulling="2026-01-30 11:20:46.591171456 +0000 UTC m=+1597.141047522" observedRunningTime="2026-01-30 11:20:47.258740127 +0000 UTC m=+1597.808616223" watchObservedRunningTime="2026-01-30 11:20:47.263313567 +0000 UTC m=+1597.813189643"
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.512744 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gl6ts"
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.673325 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-utilities\") pod \"3e68c683-86bf-47fe-887f-289d72c93b1c\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") "
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.673466 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzf7t\" (UniqueName: \"kubernetes.io/projected/3e68c683-86bf-47fe-887f-289d72c93b1c-kube-api-access-hzf7t\") pod \"3e68c683-86bf-47fe-887f-289d72c93b1c\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") "
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.673586 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-catalog-content\") pod \"3e68c683-86bf-47fe-887f-289d72c93b1c\" (UID: \"3e68c683-86bf-47fe-887f-289d72c93b1c\") "
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.674467 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-utilities" (OuterVolumeSpecName: "utilities") pod "3e68c683-86bf-47fe-887f-289d72c93b1c" (UID: "3e68c683-86bf-47fe-887f-289d72c93b1c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.679526 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e68c683-86bf-47fe-887f-289d72c93b1c-kube-api-access-hzf7t" (OuterVolumeSpecName: "kube-api-access-hzf7t") pod "3e68c683-86bf-47fe-887f-289d72c93b1c" (UID: "3e68c683-86bf-47fe-887f-289d72c93b1c"). InnerVolumeSpecName "kube-api-access-hzf7t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.732592 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e68c683-86bf-47fe-887f-289d72c93b1c" (UID: "3e68c683-86bf-47fe-887f-289d72c93b1c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.774861 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.774897 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e68c683-86bf-47fe-887f-289d72c93b1c-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:20:47 crc kubenswrapper[4869]: I0130 11:20:47.774909 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzf7t\" (UniqueName: \"kubernetes.io/projected/3e68c683-86bf-47fe-887f-289d72c93b1c-kube-api-access-hzf7t\") on node \"crc\" DevicePath \"\"" Jan 30 11:20:48 crc kubenswrapper[4869]: I0130 11:20:48.252507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gl6ts" event={"ID":"3e68c683-86bf-47fe-887f-289d72c93b1c","Type":"ContainerDied","Data":"0afb624ad113e3cf8fc0b1d98e0990463e26d6392182ea09eaa469935870a3a8"} Jan 30 11:20:48 crc kubenswrapper[4869]: I0130 11:20:48.252876 4869 scope.go:117] "RemoveContainer" containerID="61f8b5a062a9eb5bc0fa15728aa4a3bf2efccb7852187ca7b77f3bfdc8df6ccc" Jan 30 11:20:48 crc kubenswrapper[4869]: I0130 11:20:48.252576 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gl6ts" Jan 30 11:20:48 crc kubenswrapper[4869]: I0130 11:20:48.275894 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gl6ts"] Jan 30 11:20:48 crc kubenswrapper[4869]: I0130 11:20:48.278104 4869 scope.go:117] "RemoveContainer" containerID="69701c87e5a2ab134cf47f619c097e3982e81ad7471a2b801a35ac6830df0090" Jan 30 11:20:48 crc kubenswrapper[4869]: I0130 11:20:48.292665 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gl6ts"] Jan 30 11:20:48 crc kubenswrapper[4869]: I0130 11:20:48.296512 4869 scope.go:117] "RemoveContainer" containerID="b43ada421df947fac5ed4dc0172df2fec32456d868e31a3c8d678b4306be10dc" Jan 30 11:20:50 crc kubenswrapper[4869]: I0130 11:20:50.140842 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" path="/var/lib/kubelet/pods/3e68c683-86bf-47fe-887f-289d72c93b1c/volumes" Jan 30 11:20:54 crc kubenswrapper[4869]: I0130 11:20:54.374642 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nngtl" Jan 30 11:20:54 crc kubenswrapper[4869]: I0130 11:20:54.374997 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nngtl" Jan 30 11:20:54 crc kubenswrapper[4869]: I0130 11:20:54.431335 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nngtl" Jan 30 11:20:55 crc kubenswrapper[4869]: I0130 11:20:55.340629 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nngtl" Jan 30 11:20:55 crc kubenswrapper[4869]: I0130 11:20:55.389405 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nngtl"] Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.320172 4869 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/redhat-marketplace-nngtl" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="registry-server" containerID="cri-o://1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da" gracePeriod=2 Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.703192 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nngtl" Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.815555 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-utilities\") pod \"dd76d919-f21a-458d-9903-ea3180418753\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.815608 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftql2\" (UniqueName: \"kubernetes.io/projected/dd76d919-f21a-458d-9903-ea3180418753-kube-api-access-ftql2\") pod \"dd76d919-f21a-458d-9903-ea3180418753\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.815685 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-catalog-content\") pod \"dd76d919-f21a-458d-9903-ea3180418753\" (UID: \"dd76d919-f21a-458d-9903-ea3180418753\") " Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.816657 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-utilities" (OuterVolumeSpecName: "utilities") pod "dd76d919-f21a-458d-9903-ea3180418753" (UID: "dd76d919-f21a-458d-9903-ea3180418753"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.821622 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd76d919-f21a-458d-9903-ea3180418753-kube-api-access-ftql2" (OuterVolumeSpecName: "kube-api-access-ftql2") pod "dd76d919-f21a-458d-9903-ea3180418753" (UID: "dd76d919-f21a-458d-9903-ea3180418753"). InnerVolumeSpecName "kube-api-access-ftql2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.840903 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd76d919-f21a-458d-9903-ea3180418753" (UID: "dd76d919-f21a-458d-9903-ea3180418753"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.917011 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.917271 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftql2\" (UniqueName: \"kubernetes.io/projected/dd76d919-f21a-458d-9903-ea3180418753-kube-api-access-ftql2\") on node \"crc\" DevicePath \"\"" Jan 30 11:20:57 crc kubenswrapper[4869]: I0130 11:20:57.917421 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd76d919-f21a-458d-9903-ea3180418753-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.329433 4869 generic.go:334] "Generic (PLEG): container finished" podID="dd76d919-f21a-458d-9903-ea3180418753" containerID="1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da" exitCode=0 Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.329485 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nngtl" event={"ID":"dd76d919-f21a-458d-9903-ea3180418753","Type":"ContainerDied","Data":"1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da"} Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.329509 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nngtl" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.329529 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nngtl" event={"ID":"dd76d919-f21a-458d-9903-ea3180418753","Type":"ContainerDied","Data":"72f1efbda42ad4173a07d3ef7ccd5739c38723117f9e5724d26c204dbe5eb24f"} Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.329553 4869 scope.go:117] "RemoveContainer" containerID="1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.352931 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nngtl"] Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.358979 4869 scope.go:117] "RemoveContainer" containerID="333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.359703 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nngtl"] Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.374840 4869 scope.go:117] "RemoveContainer" containerID="f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.397229 4869 scope.go:117] "RemoveContainer" containerID="1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da" Jan 30 11:20:58 crc kubenswrapper[4869]: E0130 11:20:58.397759 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da\": container with ID starting with 1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da not found: ID does not exist" containerID="1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.397812 4869 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da"} err="failed to get container status \"1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da\": rpc error: code = NotFound desc = could not find container \"1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da\": container with ID starting with 1e5f3efcc43e17fd38b19a4627fb7d2c0a0f464f011b308087b26c829716b2da not found: ID does not exist" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.397842 4869 scope.go:117] "RemoveContainer" containerID="333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5" Jan 30 11:20:58 crc kubenswrapper[4869]: E0130 11:20:58.398153 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5\": container with ID starting with 333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5 not found: ID does not exist" containerID="333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.398195 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5"} err="failed to get container status \"333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5\": rpc error: code = NotFound desc = could not find container \"333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5\": container with ID starting with 333c5eebc4986481b573bec2216805f74f213009201217b4f8f2bba0fc8944b5 not found: ID does not exist" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.398224 4869 scope.go:117] "RemoveContainer" containerID="f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450" Jan 30 11:20:58 crc kubenswrapper[4869]: E0130 11:20:58.398498 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450\": container with ID starting with f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450 not found: ID does not exist" containerID="f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450" Jan 30 11:20:58 crc kubenswrapper[4869]: I0130 11:20:58.398528 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450"} err="failed to get container status \"f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450\": rpc error: code = NotFound desc = could not find container \"f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450\": container with ID starting with f4e6176058b0049f1c750fd195172a0e21952d2d10e9e7da049b256604d83450 not found: ID does not exist" Jan 30 11:21:00 crc kubenswrapper[4869]: I0130 11:21:00.142514 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd76d919-f21a-458d-9903-ea3180418753" path="/var/lib/kubelet/pods/dd76d919-f21a-458d-9903-ea3180418753/volumes" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.159965 4869 scope.go:117] "RemoveContainer" containerID="2df8b1eba74a41c2b063c8fbcf197a06c9966c6a5ba096d492ead9e41ab7aa2e" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.180058 4869 scope.go:117] "RemoveContainer" 
containerID="c37c781ec6bbb85eee14167c524d88c4bc0c5851de4283fda9feb6b69d175421" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.198354 4869 scope.go:117] "RemoveContainer" containerID="d22acd0e37c1d16b48642ba07101001e0b5da4e0b87c4c3ff79fd376c6a9907a" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.214977 4869 scope.go:117] "RemoveContainer" containerID="b9690da5434b6d4146f8eab01da1057397213e30a8e072731befa7042dbba543" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.231310 4869 scope.go:117] "RemoveContainer" containerID="e4c4918f5662f8b540d7b476d1bcefa42040a6d2343b28c5333643103e016655" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.245907 4869 scope.go:117] "RemoveContainer" containerID="27b5ea189bd4822ad549497c90e0af47c89f4bcaf3a147f407784ea7d6d2c6c9" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.261695 4869 scope.go:117] "RemoveContainer" containerID="e85400675ab49c958a87fc830f35df0dee7d933272c59fcdb8ba4741cb390fd8" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.282067 4869 scope.go:117] "RemoveContainer" containerID="e54d60216b7cd4ff9bf216e525d3bff77639b9e99c69e0a57242e96c6750aee3" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.322122 4869 scope.go:117] "RemoveContainer" containerID="e62e2bc94f8e1379e29cdd03d3712ba5ca06e49d50685909a4a864d05f0bc5c7" Jan 30 11:21:14 crc kubenswrapper[4869]: I0130 11:21:14.364988 4869 scope.go:117] "RemoveContainer" containerID="e46ec962f21b84f5634e321b99b8413f420ec0f1c647a411f5941754d794789c" Jan 30 11:21:21 crc kubenswrapper[4869]: I0130 11:21:21.769536 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:21:21 crc kubenswrapper[4869]: I0130 11:21:21.770397 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:21:51 crc kubenswrapper[4869]: I0130 11:21:51.769398 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:21:51 crc kubenswrapper[4869]: I0130 11:21:51.770025 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:22:14 crc kubenswrapper[4869]: I0130 11:22:14.476048 4869 scope.go:117] "RemoveContainer" containerID="dcbc7f36a647ccce1c4f9cac0b03cc3ed28f4b6d411a2de239d056af9abe8648" Jan 30 11:22:14 crc kubenswrapper[4869]: I0130 11:22:14.511037 4869 scope.go:117] "RemoveContainer" containerID="6f648066bc5ecfa8412c587c889d17f50834350c9a5cabdd2ecd078d9aba434c" Jan 30 11:22:14 crc kubenswrapper[4869]: I0130 11:22:14.557990 4869 scope.go:117] "RemoveContainer" containerID="d6f67b2e1962982646c99be9310fe46368582436c0f28ba1d79bd9af395475fc" Jan 30 
11:22:14 crc kubenswrapper[4869]: I0130 11:22:14.587180 4869 scope.go:117] "RemoveContainer" containerID="5edd9111518ad32c4899c1b6d327c4bda2bc8c9521a53b6ca409e82447732919" Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.769211 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.769762 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.770038 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.770688 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.770808 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" gracePeriod=600 Jan 30 11:22:21 crc kubenswrapper[4869]: E0130 11:22:21.894813 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.915156 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" exitCode=0 Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.915209 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"} Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.915259 4869 scope.go:117] "RemoveContainer" containerID="cabcf877f44bd0de25c7db6df0dc9b22d4324a2881d9c97569218164d3a2997b" Jan 30 11:22:21 crc kubenswrapper[4869]: I0130 11:22:21.915864 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:22:21 crc kubenswrapper[4869]: E0130 11:22:21.916160 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:22:34 crc kubenswrapper[4869]: I0130 11:22:34.132664 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:22:34 crc kubenswrapper[4869]: E0130 11:22:34.133425 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:22:48 crc kubenswrapper[4869]: I0130 11:22:48.132889 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:22:48 crc kubenswrapper[4869]: E0130 11:22:48.133685 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:22:59 crc kubenswrapper[4869]: I0130 11:22:59.133619 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:22:59 crc kubenswrapper[4869]: E0130 11:22:59.134437 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.153400 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cz5mr"] Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154434 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="extract-content" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154476 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="extract-content" Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154498 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="extract-utilities" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154508 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="extract-utilities" Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154528 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="registry-server" Jan 30 11:23:10 crc 
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.153400 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cz5mr"]
Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154434 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="extract-content"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154476 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="extract-content"
Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154498 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="extract-utilities"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154508 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="extract-utilities"
Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154528 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="registry-server"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154561 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="registry-server"
Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154586 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="registry-server"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154593 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="registry-server"
Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154609 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="extract-content"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154615 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="extract-content"
Jan 30 11:23:10 crc kubenswrapper[4869]: E0130 11:23:10.154635 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="extract-utilities"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154643 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="extract-utilities"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154815 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e68c683-86bf-47fe-887f-289d72c93b1c" containerName="registry-server"
Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.154840 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd76d919-f21a-458d-9903-ea3180418753" containerName="registry-server"
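The paired cpu_manager/state_mem and memory_manager entries above fire when a new pod is admitted: the resource managers walk their checkpointed per-container state and drop entries whose pods no longer exist (here, two earlier marketplace catalog pods). A minimal sketch of the pattern, assuming state is a map keyed by (podUID, containerName); the types are hypothetical, not kubelet's:

package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops checkpointed assignments for containers whose
// pod is no longer active, mirroring the RemoveStaleState log entries.
func removeStaleState(state map[key]string, activePods map[string]bool) {
	for k := range state {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(state, k) // deleting during range is safe in Go
		}
	}
}

func main() {
	state := map[key]string{
		{"dd76d919-f21a-458d-9903-ea3180418753", "extract-content"}: "cpuset=0-1",
		{"3e68c683-86bf-47fe-887f-289d72c93b1c", "registry-server"}: "cpuset=2-3",
	}
	removeStaleState(state, map[string]bool{}) // neither pod is active
}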
Need to start a new one" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.171733 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cz5mr"] Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.274179 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-catalog-content\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.274288 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps8rd\" (UniqueName: \"kubernetes.io/projected/7330d80c-b370-46c3-a7c0-479fd36664b0-kube-api-access-ps8rd\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.274432 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-utilities\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.376938 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps8rd\" (UniqueName: \"kubernetes.io/projected/7330d80c-b370-46c3-a7c0-479fd36664b0-kube-api-access-ps8rd\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.377070 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-utilities\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.377165 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-catalog-content\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.377757 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-utilities\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.377935 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-catalog-content\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.399984 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ps8rd\" (UniqueName: \"kubernetes.io/projected/7330d80c-b370-46c3-a7c0-479fd36664b0-kube-api-access-ps8rd\") pod \"redhat-operators-cz5mr\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.493143 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:10 crc kubenswrapper[4869]: I0130 11:23:10.957809 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cz5mr"] Jan 30 11:23:11 crc kubenswrapper[4869]: I0130 11:23:11.132811 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:23:11 crc kubenswrapper[4869]: E0130 11:23:11.133348 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:23:11 crc kubenswrapper[4869]: I0130 11:23:11.257704 4869 generic.go:334] "Generic (PLEG): container finished" podID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerID="2a02e9e11ba115b605d1a33e868b9cb3344760f175af33b948b1fdd2fe3eed66" exitCode=0 Jan 30 11:23:11 crc kubenswrapper[4869]: I0130 11:23:11.257770 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz5mr" event={"ID":"7330d80c-b370-46c3-a7c0-479fd36664b0","Type":"ContainerDied","Data":"2a02e9e11ba115b605d1a33e868b9cb3344760f175af33b948b1fdd2fe3eed66"} Jan 30 11:23:11 crc kubenswrapper[4869]: I0130 11:23:11.257804 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz5mr" event={"ID":"7330d80c-b370-46c3-a7c0-479fd36664b0","Type":"ContainerStarted","Data":"9c097c91e7befacea2647082ec535d7c87203b093b0a1ada1bbc12ddeadb7f80"} Jan 30 11:23:12 crc kubenswrapper[4869]: I0130 11:23:12.274166 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz5mr" event={"ID":"7330d80c-b370-46c3-a7c0-479fd36664b0","Type":"ContainerStarted","Data":"786e1984c2bfda37597dcaa97f958f557774071cbae9eb9e52784554a8ffabfb"} Jan 30 11:23:13 crc kubenswrapper[4869]: I0130 11:23:13.281844 4869 generic.go:334] "Generic (PLEG): container finished" podID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerID="786e1984c2bfda37597dcaa97f958f557774071cbae9eb9e52784554a8ffabfb" exitCode=0 Jan 30 11:23:13 crc kubenswrapper[4869]: I0130 11:23:13.281889 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz5mr" event={"ID":"7330d80c-b370-46c3-a7c0-479fd36664b0","Type":"ContainerDied","Data":"786e1984c2bfda37597dcaa97f958f557774071cbae9eb9e52784554a8ffabfb"} Jan 30 11:23:14 crc kubenswrapper[4869]: I0130 11:23:14.290834 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz5mr" event={"ID":"7330d80c-b370-46c3-a7c0-479fd36664b0","Type":"ContainerStarted","Data":"5eed2f4cf68ede5d4ff4c0c3aa3bc7a46a15791647402b2b2e6b3e6220a77c9b"} Jan 30 11:23:14 crc kubenswrapper[4869]: I0130 11:23:14.310910 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-cz5mr" podStartSLOduration=1.500910658 podStartE2EDuration="4.310890842s" podCreationTimestamp="2026-01-30 11:23:10 +0000 UTC" firstStartedPulling="2026-01-30 11:23:11.259306742 +0000 UTC m=+1741.809182808" lastFinishedPulling="2026-01-30 11:23:14.069286926 +0000 UTC m=+1744.619162992" observedRunningTime="2026-01-30 11:23:14.30658547 +0000 UTC m=+1744.856461536" watchObservedRunningTime="2026-01-30 11:23:14.310890842 +0000 UTC m=+1744.860766908" Jan 30 11:23:20 crc kubenswrapper[4869]: I0130 11:23:20.493892 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:20 crc kubenswrapper[4869]: I0130 11:23:20.494734 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:20 crc kubenswrapper[4869]: I0130 11:23:20.537809 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:21 crc kubenswrapper[4869]: I0130 11:23:21.377161 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:21 crc kubenswrapper[4869]: I0130 11:23:21.419686 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cz5mr"] Jan 30 11:23:22 crc kubenswrapper[4869]: I0130 11:23:22.133144 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:23:22 crc kubenswrapper[4869]: E0130 11:23:22.133439 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:23:23 crc kubenswrapper[4869]: I0130 11:23:23.351107 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cz5mr" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="registry-server" containerID="cri-o://5eed2f4cf68ede5d4ff4c0c3aa3bc7a46a15791647402b2b2e6b3e6220a77c9b" gracePeriod=2 Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.361285 4869 generic.go:334] "Generic (PLEG): container finished" podID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerID="5eed2f4cf68ede5d4ff4c0c3aa3bc7a46a15791647402b2b2e6b3e6220a77c9b" exitCode=0 Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.361329 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz5mr" event={"ID":"7330d80c-b370-46c3-a7c0-479fd36664b0","Type":"ContainerDied","Data":"5eed2f4cf68ede5d4ff4c0c3aa3bc7a46a15791647402b2b2e6b3e6220a77c9b"} Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.804183 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.890296 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps8rd\" (UniqueName: \"kubernetes.io/projected/7330d80c-b370-46c3-a7c0-479fd36664b0-kube-api-access-ps8rd\") pod \"7330d80c-b370-46c3-a7c0-479fd36664b0\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.890444 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-utilities\") pod \"7330d80c-b370-46c3-a7c0-479fd36664b0\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.890496 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-catalog-content\") pod \"7330d80c-b370-46c3-a7c0-479fd36664b0\" (UID: \"7330d80c-b370-46c3-a7c0-479fd36664b0\") " Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.891820 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-utilities" (OuterVolumeSpecName: "utilities") pod "7330d80c-b370-46c3-a7c0-479fd36664b0" (UID: "7330d80c-b370-46c3-a7c0-479fd36664b0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.895421 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7330d80c-b370-46c3-a7c0-479fd36664b0-kube-api-access-ps8rd" (OuterVolumeSpecName: "kube-api-access-ps8rd") pod "7330d80c-b370-46c3-a7c0-479fd36664b0" (UID: "7330d80c-b370-46c3-a7c0-479fd36664b0"). InnerVolumeSpecName "kube-api-access-ps8rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.992273 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:23:24 crc kubenswrapper[4869]: I0130 11:23:24.992301 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps8rd\" (UniqueName: \"kubernetes.io/projected/7330d80c-b370-46c3-a7c0-479fd36664b0-kube-api-access-ps8rd\") on node \"crc\" DevicePath \"\"" Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.020461 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7330d80c-b370-46c3-a7c0-479fd36664b0" (UID: "7330d80c-b370-46c3-a7c0-479fd36664b0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.093429 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7330d80c-b370-46c3-a7c0-479fd36664b0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.377038 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz5mr" event={"ID":"7330d80c-b370-46c3-a7c0-479fd36664b0","Type":"ContainerDied","Data":"9c097c91e7befacea2647082ec535d7c87203b093b0a1ada1bbc12ddeadb7f80"} Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.377100 4869 scope.go:117] "RemoveContainer" containerID="5eed2f4cf68ede5d4ff4c0c3aa3bc7a46a15791647402b2b2e6b3e6220a77c9b" Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.377139 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cz5mr" Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.394381 4869 scope.go:117] "RemoveContainer" containerID="786e1984c2bfda37597dcaa97f958f557774071cbae9eb9e52784554a8ffabfb" Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.413771 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cz5mr"] Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.419146 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cz5mr"] Jan 30 11:23:25 crc kubenswrapper[4869]: I0130 11:23:25.423960 4869 scope.go:117] "RemoveContainer" containerID="2a02e9e11ba115b605d1a33e868b9cb3344760f175af33b948b1fdd2fe3eed66" Jan 30 11:23:26 crc kubenswrapper[4869]: I0130 11:23:26.142344 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" path="/var/lib/kubelet/pods/7330d80c-b370-46c3-a7c0-479fd36664b0/volumes" Jan 30 11:23:35 crc kubenswrapper[4869]: I0130 11:23:35.132968 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:23:35 crc kubenswrapper[4869]: E0130 11:23:35.133792 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:23:48 crc kubenswrapper[4869]: I0130 11:23:48.133314 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:23:48 crc kubenswrapper[4869]: E0130 11:23:48.133985 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:24:01 crc kubenswrapper[4869]: I0130 11:24:01.132528 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:24:01 crc kubenswrapper[4869]: E0130 11:24:01.133267 
Jan 30 11:24:14 crc kubenswrapper[4869]: I0130 11:24:14.133238 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:24:14 crc kubenswrapper[4869]: E0130 11:24:14.133746 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:24:29 crc kubenswrapper[4869]: I0130 11:24:29.132908 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:24:29 crc kubenswrapper[4869]: E0130 11:24:29.133741 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:24:41 crc kubenswrapper[4869]: I0130 11:24:41.134130 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:24:41 crc kubenswrapper[4869]: E0130 11:24:41.135303 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:24:54 crc kubenswrapper[4869]: I0130 11:24:54.132980 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:24:54 crc kubenswrapper[4869]: E0130 11:24:54.133780 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:25:05 crc kubenswrapper[4869]: I0130 11:25:05.133569 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:25:05 crc kubenswrapper[4869]: E0130 11:25:05.134314 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:25:16 crc kubenswrapper[4869]: I0130 11:25:16.133967 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:25:16 crc kubenswrapper[4869]: E0130 11:25:16.135113 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:25:29 crc kubenswrapper[4869]: I0130 11:25:29.133265 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:25:29 crc kubenswrapper[4869]: E0130 11:25:29.133825 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:25:41 crc kubenswrapper[4869]: I0130 11:25:41.132964 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:25:41 crc kubenswrapper[4869]: E0130 11:25:41.133619 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:25:56 crc kubenswrapper[4869]: I0130 11:25:56.133381 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:25:56 crc kubenswrapper[4869]: E0130 11:25:56.134088 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:26:08 crc kubenswrapper[4869]: I0130 11:26:08.133281 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61"
Jan 30 11:26:08 crc kubenswrapper[4869]: E0130 11:26:08.135298 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:26:19 crc kubenswrapper[4869]: I0130 11:26:19.132918 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:26:19 crc kubenswrapper[4869]: E0130 11:26:19.133628 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:26:34 crc kubenswrapper[4869]: I0130 11:26:34.132948 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:26:34 crc kubenswrapper[4869]: E0130 11:26:34.133741 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:26:47 crc kubenswrapper[4869]: I0130 11:26:47.132844 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:26:47 crc kubenswrapper[4869]: E0130 11:26:47.133456 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:26:58 crc kubenswrapper[4869]: I0130 11:26:58.133164 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:26:58 crc kubenswrapper[4869]: E0130 11:26:58.134153 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:27:13 crc kubenswrapper[4869]: I0130 11:27:13.133248 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:27:13 crc kubenswrapper[4869]: E0130 11:27:13.133981 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:27:27 crc kubenswrapper[4869]: I0130 11:27:27.134136 4869 
scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:27:28 crc kubenswrapper[4869]: I0130 11:27:28.075060 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"463507ab061d257eb1b3adce78dea16b402a8685a0d66911924501e5413965d1"} Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.600601 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4zz7g"] Jan 30 11:27:49 crc kubenswrapper[4869]: E0130 11:27:49.601738 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="registry-server" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.601757 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="registry-server" Jan 30 11:27:49 crc kubenswrapper[4869]: E0130 11:27:49.601784 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="extract-content" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.601793 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="extract-content" Jan 30 11:27:49 crc kubenswrapper[4869]: E0130 11:27:49.601810 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="extract-utilities" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.601821 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="extract-utilities" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.602036 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7330d80c-b370-46c3-a7c0-479fd36664b0" containerName="registry-server" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.603520 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.618362 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4zz7g"] Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.712353 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7sqr\" (UniqueName: \"kubernetes.io/projected/ed78e7b2-e5d5-43f4-94b1-372a636192f2-kube-api-access-c7sqr\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.712754 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-catalog-content\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.712796 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-utilities\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.813769 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7sqr\" (UniqueName: \"kubernetes.io/projected/ed78e7b2-e5d5-43f4-94b1-372a636192f2-kube-api-access-c7sqr\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.813838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-catalog-content\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.813865 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-utilities\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.814370 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-utilities\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.814492 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-catalog-content\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.834636 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c7sqr\" (UniqueName: \"kubernetes.io/projected/ed78e7b2-e5d5-43f4-94b1-372a636192f2-kube-api-access-c7sqr\") pod \"community-operators-4zz7g\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") " pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:49 crc kubenswrapper[4869]: I0130 11:27:49.937067 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:27:50 crc kubenswrapper[4869]: I0130 11:27:50.461338 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4zz7g"] Jan 30 11:27:51 crc kubenswrapper[4869]: I0130 11:27:51.239482 4869 generic.go:334] "Generic (PLEG): container finished" podID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerID="280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4" exitCode=0 Jan 30 11:27:51 crc kubenswrapper[4869]: I0130 11:27:51.239550 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4zz7g" event={"ID":"ed78e7b2-e5d5-43f4-94b1-372a636192f2","Type":"ContainerDied","Data":"280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4"} Jan 30 11:27:51 crc kubenswrapper[4869]: I0130 11:27:51.239586 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4zz7g" event={"ID":"ed78e7b2-e5d5-43f4-94b1-372a636192f2","Type":"ContainerStarted","Data":"573cbf9d764438120f967ff4731ecb9e134ec8e3997ba09dec15ad52b41529c9"} Jan 30 11:27:51 crc kubenswrapper[4869]: I0130 11:27:51.246855 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 11:27:52 crc kubenswrapper[4869]: I0130 11:27:52.247768 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4zz7g" event={"ID":"ed78e7b2-e5d5-43f4-94b1-372a636192f2","Type":"ContainerStarted","Data":"bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205"} Jan 30 11:27:53 crc kubenswrapper[4869]: I0130 11:27:53.258072 4869 generic.go:334] "Generic (PLEG): container finished" podID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerID="bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205" exitCode=0 Jan 30 11:27:53 crc kubenswrapper[4869]: I0130 11:27:53.258181 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4zz7g" event={"ID":"ed78e7b2-e5d5-43f4-94b1-372a636192f2","Type":"ContainerDied","Data":"bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205"} Jan 30 11:27:54 crc kubenswrapper[4869]: I0130 11:27:54.266811 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4zz7g" event={"ID":"ed78e7b2-e5d5-43f4-94b1-372a636192f2","Type":"ContainerStarted","Data":"34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd"} Jan 30 11:27:54 crc kubenswrapper[4869]: I0130 11:27:54.286583 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4zz7g" podStartSLOduration=2.8519316200000002 podStartE2EDuration="5.286566034s" podCreationTimestamp="2026-01-30 11:27:49 +0000 UTC" firstStartedPulling="2026-01-30 11:27:51.24330706 +0000 UTC m=+2021.793183126" lastFinishedPulling="2026-01-30 11:27:53.677941474 +0000 UTC m=+2024.227817540" observedRunningTime="2026-01-30 11:27:54.284549706 +0000 UTC m=+2024.834425772" watchObservedRunningTime="2026-01-30 
Jan 30 11:27:59 crc kubenswrapper[4869]: I0130 11:27:59.937573 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4zz7g"
Jan 30 11:27:59 crc kubenswrapper[4869]: I0130 11:27:59.937913 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4zz7g"
Jan 30 11:27:59 crc kubenswrapper[4869]: I0130 11:27:59.980011 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4zz7g"
Jan 30 11:28:00 crc kubenswrapper[4869]: I0130 11:28:00.345839 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4zz7g"
Jan 30 11:28:00 crc kubenswrapper[4869]: I0130 11:28:00.386450 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4zz7g"]
Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.324246 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4zz7g" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="registry-server" containerID="cri-o://34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd" gracePeriod=2
Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.718782 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4zz7g"
Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.898600 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-utilities\") pod \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") "
Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.898652 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-catalog-content\") pod \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") "
Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.898695 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7sqr\" (UniqueName: \"kubernetes.io/projected/ed78e7b2-e5d5-43f4-94b1-372a636192f2-kube-api-access-c7sqr\") pod \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\" (UID: \"ed78e7b2-e5d5-43f4-94b1-372a636192f2\") "
Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.899795 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-utilities" (OuterVolumeSpecName: "utilities") pod "ed78e7b2-e5d5-43f4-94b1-372a636192f2" (UID: "ed78e7b2-e5d5-43f4-94b1-372a636192f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.908177 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed78e7b2-e5d5-43f4-94b1-372a636192f2-kube-api-access-c7sqr" (OuterVolumeSpecName: "kube-api-access-c7sqr") pod "ed78e7b2-e5d5-43f4-94b1-372a636192f2" (UID: "ed78e7b2-e5d5-43f4-94b1-372a636192f2"). InnerVolumeSpecName "kube-api-access-c7sqr". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:28:02 crc kubenswrapper[4869]: I0130 11:28:02.959218 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed78e7b2-e5d5-43f4-94b1-372a636192f2" (UID: "ed78e7b2-e5d5-43f4-94b1-372a636192f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.000449 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.000516 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed78e7b2-e5d5-43f4-94b1-372a636192f2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.000539 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7sqr\" (UniqueName: \"kubernetes.io/projected/ed78e7b2-e5d5-43f4-94b1-372a636192f2-kube-api-access-c7sqr\") on node \"crc\" DevicePath \"\"" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.333833 4869 generic.go:334] "Generic (PLEG): container finished" podID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerID="34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd" exitCode=0 Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.333911 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4zz7g" event={"ID":"ed78e7b2-e5d5-43f4-94b1-372a636192f2","Type":"ContainerDied","Data":"34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd"} Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.333945 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4zz7g" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.333976 4869 scope.go:117] "RemoveContainer" containerID="34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.333957 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4zz7g" event={"ID":"ed78e7b2-e5d5-43f4-94b1-372a636192f2","Type":"ContainerDied","Data":"573cbf9d764438120f967ff4731ecb9e134ec8e3997ba09dec15ad52b41529c9"} Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.355629 4869 scope.go:117] "RemoveContainer" containerID="bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.368870 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4zz7g"] Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.376200 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4zz7g"] Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.395589 4869 scope.go:117] "RemoveContainer" containerID="280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.415753 4869 scope.go:117] "RemoveContainer" containerID="34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd" Jan 30 11:28:03 crc kubenswrapper[4869]: E0130 11:28:03.416558 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd\": container with ID starting with 34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd not found: ID does not exist" containerID="34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.416627 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd"} err="failed to get container status \"34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd\": rpc error: code = NotFound desc = could not find container \"34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd\": container with ID starting with 34b64338848ad703923863b415ad074cb61371d987145309f3304c7c9322f6fd not found: ID does not exist" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.416676 4869 scope.go:117] "RemoveContainer" containerID="bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205" Jan 30 11:28:03 crc kubenswrapper[4869]: E0130 11:28:03.419291 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205\": container with ID starting with bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205 not found: ID does not exist" containerID="bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.419340 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205"} err="failed to get container status \"bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205\": rpc error: code = NotFound desc = could not find 
container \"bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205\": container with ID starting with bb2d8d61df39519c4a91f981da7481ce15434a7bfba225453cacf1c52f496205 not found: ID does not exist" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.419375 4869 scope.go:117] "RemoveContainer" containerID="280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4" Jan 30 11:28:03 crc kubenswrapper[4869]: E0130 11:28:03.420152 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4\": container with ID starting with 280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4 not found: ID does not exist" containerID="280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4" Jan 30 11:28:03 crc kubenswrapper[4869]: I0130 11:28:03.420181 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4"} err="failed to get container status \"280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4\": rpc error: code = NotFound desc = could not find container \"280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4\": container with ID starting with 280bd16ddf270dbe3178d4efbdeea184469046c866054f4d9e7a99dfc3cf5ff4 not found: ID does not exist" Jan 30 11:28:04 crc kubenswrapper[4869]: I0130 11:28:04.143095 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" path="/var/lib/kubelet/pods/ed78e7b2-e5d5-43f4-94b1-372a636192f2/volumes" Jan 30 11:29:51 crc kubenswrapper[4869]: I0130 11:29:51.769280 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:29:51 crc kubenswrapper[4869]: I0130 11:29:51.769883 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.201019 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"] Jan 30 11:30:00 crc kubenswrapper[4869]: E0130 11:30:00.201965 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="registry-server" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.201980 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="registry-server" Jan 30 11:30:00 crc kubenswrapper[4869]: E0130 11:30:00.201991 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="extract-content" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.201997 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="extract-content" Jan 30 11:30:00 crc kubenswrapper[4869]: E0130 11:30:00.202022 4869 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="extract-utilities" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.202028 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="extract-utilities" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.202177 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed78e7b2-e5d5-43f4-94b1-372a636192f2" containerName="registry-server" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.202684 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.205447 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.205511 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.232103 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"] Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.254264 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjglm\" (UniqueName: \"kubernetes.io/projected/53231f02-8fd4-44e9-9181-d6e127dfdd42-kube-api-access-hjglm\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.254341 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53231f02-8fd4-44e9-9181-d6e127dfdd42-secret-volume\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.254430 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53231f02-8fd4-44e9-9181-d6e127dfdd42-config-volume\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.355026 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53231f02-8fd4-44e9-9181-d6e127dfdd42-config-volume\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.355096 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjglm\" (UniqueName: \"kubernetes.io/projected/53231f02-8fd4-44e9-9181-d6e127dfdd42-kube-api-access-hjglm\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.355130 4869 
Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.356586 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53231f02-8fd4-44e9-9181-d6e127dfdd42-config-volume\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"
Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.361261 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53231f02-8fd4-44e9-9181-d6e127dfdd42-secret-volume\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"
Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.373856 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjglm\" (UniqueName: \"kubernetes.io/projected/53231f02-8fd4-44e9-9181-d6e127dfdd42-kube-api-access-hjglm\") pod \"collect-profiles-29496210-cc4vr\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"
Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.531663 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"
Jan 30 11:30:00 crc kubenswrapper[4869]: I0130 11:30:00.974951 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"]
Jan 30 11:30:01 crc kubenswrapper[4869]: I0130 11:30:01.147766 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" event={"ID":"53231f02-8fd4-44e9-9181-d6e127dfdd42","Type":"ContainerStarted","Data":"47071ca04c8e9c1a5f33b709679ae269691b6ba7f5d7d378991f4e70e8e83207"}
Jan 30 11:30:02 crc kubenswrapper[4869]: I0130 11:30:02.157001 4869 generic.go:334] "Generic (PLEG): container finished" podID="53231f02-8fd4-44e9-9181-d6e127dfdd42" containerID="9ca7d0e53d0dd09c51b2eb8f226a19ee05cd94c34d0eaeded756d8e633b552fd" exitCode=0
Jan 30 11:30:02 crc kubenswrapper[4869]: I0130 11:30:02.157058 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" event={"ID":"53231f02-8fd4-44e9-9181-d6e127dfdd42","Type":"ContainerDied","Data":"9ca7d0e53d0dd09c51b2eb8f226a19ee05cd94c34d0eaeded756d8e633b552fd"}
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.504109 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53231f02-8fd4-44e9-9181-d6e127dfdd42-secret-volume\") pod \"53231f02-8fd4-44e9-9181-d6e127dfdd42\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.504219 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53231f02-8fd4-44e9-9181-d6e127dfdd42-config-volume\") pod \"53231f02-8fd4-44e9-9181-d6e127dfdd42\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.504350 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjglm\" (UniqueName: \"kubernetes.io/projected/53231f02-8fd4-44e9-9181-d6e127dfdd42-kube-api-access-hjglm\") pod \"53231f02-8fd4-44e9-9181-d6e127dfdd42\" (UID: \"53231f02-8fd4-44e9-9181-d6e127dfdd42\") " Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.505119 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53231f02-8fd4-44e9-9181-d6e127dfdd42-config-volume" (OuterVolumeSpecName: "config-volume") pod "53231f02-8fd4-44e9-9181-d6e127dfdd42" (UID: "53231f02-8fd4-44e9-9181-d6e127dfdd42"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.510545 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53231f02-8fd4-44e9-9181-d6e127dfdd42-kube-api-access-hjglm" (OuterVolumeSpecName: "kube-api-access-hjglm") pod "53231f02-8fd4-44e9-9181-d6e127dfdd42" (UID: "53231f02-8fd4-44e9-9181-d6e127dfdd42"). InnerVolumeSpecName "kube-api-access-hjglm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.510575 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53231f02-8fd4-44e9-9181-d6e127dfdd42-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "53231f02-8fd4-44e9-9181-d6e127dfdd42" (UID: "53231f02-8fd4-44e9-9181-d6e127dfdd42"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.605679 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53231f02-8fd4-44e9-9181-d6e127dfdd42-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.605777 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjglm\" (UniqueName: \"kubernetes.io/projected/53231f02-8fd4-44e9-9181-d6e127dfdd42-kube-api-access-hjglm\") on node \"crc\" DevicePath \"\"" Jan 30 11:30:03 crc kubenswrapper[4869]: I0130 11:30:03.605797 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53231f02-8fd4-44e9-9181-d6e127dfdd42-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 11:30:04 crc kubenswrapper[4869]: I0130 11:30:04.174372 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" event={"ID":"53231f02-8fd4-44e9-9181-d6e127dfdd42","Type":"ContainerDied","Data":"47071ca04c8e9c1a5f33b709679ae269691b6ba7f5d7d378991f4e70e8e83207"} Jan 30 11:30:04 crc kubenswrapper[4869]: I0130 11:30:04.174416 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47071ca04c8e9c1a5f33b709679ae269691b6ba7f5d7d378991f4e70e8e83207" Jan 30 11:30:04 crc kubenswrapper[4869]: I0130 11:30:04.174444 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr" Jan 30 11:30:04 crc kubenswrapper[4869]: I0130 11:30:04.554420 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7"] Jan 30 11:30:04 crc kubenswrapper[4869]: I0130 11:30:04.574737 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496165-hwsc7"] Jan 30 11:30:06 crc kubenswrapper[4869]: I0130 11:30:06.144510 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="624037a4-840b-4c6d-806b-6b0d2276328d" path="/var/lib/kubelet/pods/624037a4-840b-4c6d-806b-6b0d2276328d/volumes" Jan 30 11:30:14 crc kubenswrapper[4869]: I0130 11:30:14.766091 4869 scope.go:117] "RemoveContainer" containerID="79907ccc904a8e91f6ae91d0c6026a416f3e793ffd96165294ae8f767ea96913" Jan 30 11:30:21 crc kubenswrapper[4869]: I0130 11:30:21.769750 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:30:21 crc kubenswrapper[4869]: I0130 11:30:21.770013 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.332771 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g9wt2"] Jan 30 11:30:45 crc kubenswrapper[4869]: E0130 11:30:45.333654 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53231f02-8fd4-44e9-9181-d6e127dfdd42" 
containerName="collect-profiles" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.333670 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="53231f02-8fd4-44e9-9181-d6e127dfdd42" containerName="collect-profiles" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.333849 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="53231f02-8fd4-44e9-9181-d6e127dfdd42" containerName="collect-profiles" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.334915 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.349656 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g9wt2"] Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.492019 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-utilities\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.492089 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qjsb\" (UniqueName: \"kubernetes.io/projected/e24286c1-22cb-405f-a601-bfc3417da8ed-kube-api-access-8qjsb\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.492144 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-catalog-content\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.593868 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-catalog-content\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.593978 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-utilities\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.594014 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qjsb\" (UniqueName: \"kubernetes.io/projected/e24286c1-22cb-405f-a601-bfc3417da8ed-kube-api-access-8qjsb\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.594493 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-catalog-content\") pod \"certified-operators-g9wt2\" (UID: 
\"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.594571 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-utilities\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.615032 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qjsb\" (UniqueName: \"kubernetes.io/projected/e24286c1-22cb-405f-a601-bfc3417da8ed-kube-api-access-8qjsb\") pod \"certified-operators-g9wt2\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:45 crc kubenswrapper[4869]: I0130 11:30:45.664692 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:46 crc kubenswrapper[4869]: I0130 11:30:46.144698 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g9wt2"] Jan 30 11:30:46 crc kubenswrapper[4869]: W0130 11:30:46.147950 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode24286c1_22cb_405f_a601_bfc3417da8ed.slice/crio-c907032572208dd6812cc396df335da1e7777a4629952a77f7f5318f0049766d WatchSource:0}: Error finding container c907032572208dd6812cc396df335da1e7777a4629952a77f7f5318f0049766d: Status 404 returned error can't find the container with id c907032572208dd6812cc396df335da1e7777a4629952a77f7f5318f0049766d Jan 30 11:30:46 crc kubenswrapper[4869]: I0130 11:30:46.453670 4869 generic.go:334] "Generic (PLEG): container finished" podID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerID="be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048" exitCode=0 Jan 30 11:30:46 crc kubenswrapper[4869]: I0130 11:30:46.453994 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9wt2" event={"ID":"e24286c1-22cb-405f-a601-bfc3417da8ed","Type":"ContainerDied","Data":"be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048"} Jan 30 11:30:46 crc kubenswrapper[4869]: I0130 11:30:46.454030 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9wt2" event={"ID":"e24286c1-22cb-405f-a601-bfc3417da8ed","Type":"ContainerStarted","Data":"c907032572208dd6812cc396df335da1e7777a4629952a77f7f5318f0049766d"} Jan 30 11:30:47 crc kubenswrapper[4869]: I0130 11:30:47.463498 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9wt2" event={"ID":"e24286c1-22cb-405f-a601-bfc3417da8ed","Type":"ContainerStarted","Data":"a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076"} Jan 30 11:30:48 crc kubenswrapper[4869]: I0130 11:30:48.472047 4869 generic.go:334] "Generic (PLEG): container finished" podID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerID="a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076" exitCode=0 Jan 30 11:30:48 crc kubenswrapper[4869]: I0130 11:30:48.472095 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9wt2" 
event={"ID":"e24286c1-22cb-405f-a601-bfc3417da8ed","Type":"ContainerDied","Data":"a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076"} Jan 30 11:30:48 crc kubenswrapper[4869]: I0130 11:30:48.472128 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9wt2" event={"ID":"e24286c1-22cb-405f-a601-bfc3417da8ed","Type":"ContainerStarted","Data":"6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c"} Jan 30 11:30:48 crc kubenswrapper[4869]: I0130 11:30:48.491156 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g9wt2" podStartSLOduration=2.069692601 podStartE2EDuration="3.491128796s" podCreationTimestamp="2026-01-30 11:30:45 +0000 UTC" firstStartedPulling="2026-01-30 11:30:46.456001218 +0000 UTC m=+2197.005877284" lastFinishedPulling="2026-01-30 11:30:47.877437413 +0000 UTC m=+2198.427313479" observedRunningTime="2026-01-30 11:30:48.489594813 +0000 UTC m=+2199.039470909" watchObservedRunningTime="2026-01-30 11:30:48.491128796 +0000 UTC m=+2199.041004862" Jan 30 11:30:51 crc kubenswrapper[4869]: I0130 11:30:51.769413 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:30:51 crc kubenswrapper[4869]: I0130 11:30:51.769788 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:30:51 crc kubenswrapper[4869]: I0130 11:30:51.769840 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:30:51 crc kubenswrapper[4869]: I0130 11:30:51.770557 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"463507ab061d257eb1b3adce78dea16b402a8685a0d66911924501e5413965d1"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:30:51 crc kubenswrapper[4869]: I0130 11:30:51.770619 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://463507ab061d257eb1b3adce78dea16b402a8685a0d66911924501e5413965d1" gracePeriod=600 Jan 30 11:30:52 crc kubenswrapper[4869]: I0130 11:30:52.499186 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="463507ab061d257eb1b3adce78dea16b402a8685a0d66911924501e5413965d1" exitCode=0 Jan 30 11:30:52 crc kubenswrapper[4869]: I0130 11:30:52.499267 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"463507ab061d257eb1b3adce78dea16b402a8685a0d66911924501e5413965d1"} Jan 30 11:30:52 crc kubenswrapper[4869]: I0130 11:30:52.499763 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee"} Jan 30 11:30:52 crc kubenswrapper[4869]: I0130 11:30:52.499789 4869 scope.go:117] "RemoveContainer" containerID="9aba8ccf7aed157a806b467d9bdb676ee5f7e186618126ccc5f4073fb897ab61" Jan 30 11:30:55 crc kubenswrapper[4869]: I0130 11:30:55.665075 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:55 crc kubenswrapper[4869]: I0130 11:30:55.674912 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:55 crc kubenswrapper[4869]: I0130 11:30:55.772358 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:56 crc kubenswrapper[4869]: I0130 11:30:56.567066 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:56 crc kubenswrapper[4869]: I0130 11:30:56.604857 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g9wt2"] Jan 30 11:30:58 crc kubenswrapper[4869]: I0130 11:30:58.538791 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g9wt2" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="registry-server" containerID="cri-o://6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c" gracePeriod=2 Jan 30 11:30:58 crc kubenswrapper[4869]: I0130 11:30:58.928295 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g9wt2" Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.008490 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qjsb\" (UniqueName: \"kubernetes.io/projected/e24286c1-22cb-405f-a601-bfc3417da8ed-kube-api-access-8qjsb\") pod \"e24286c1-22cb-405f-a601-bfc3417da8ed\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.008655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-utilities\") pod \"e24286c1-22cb-405f-a601-bfc3417da8ed\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.008746 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-catalog-content\") pod \"e24286c1-22cb-405f-a601-bfc3417da8ed\" (UID: \"e24286c1-22cb-405f-a601-bfc3417da8ed\") " Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.011922 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-utilities" (OuterVolumeSpecName: "utilities") pod "e24286c1-22cb-405f-a601-bfc3417da8ed" (UID: "e24286c1-22cb-405f-a601-bfc3417da8ed"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.016157 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e24286c1-22cb-405f-a601-bfc3417da8ed-kube-api-access-8qjsb" (OuterVolumeSpecName: "kube-api-access-8qjsb") pod "e24286c1-22cb-405f-a601-bfc3417da8ed" (UID: "e24286c1-22cb-405f-a601-bfc3417da8ed"). InnerVolumeSpecName "kube-api-access-8qjsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.062008 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e24286c1-22cb-405f-a601-bfc3417da8ed" (UID: "e24286c1-22cb-405f-a601-bfc3417da8ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.110745 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qjsb\" (UniqueName: \"kubernetes.io/projected/e24286c1-22cb-405f-a601-bfc3417da8ed-kube-api-access-8qjsb\") on node \"crc\" DevicePath \"\"" Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.110796 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.110805 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24286c1-22cb-405f-a601-bfc3417da8ed-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.551142 4869 generic.go:334] "Generic (PLEG): container finished" podID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerID="6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c" exitCode=0 Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.551210 4869 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.551226 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9wt2" event={"ID":"e24286c1-22cb-405f-a601-bfc3417da8ed","Type":"ContainerDied","Data":"6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c"}
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.551505 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9wt2" event={"ID":"e24286c1-22cb-405f-a601-bfc3417da8ed","Type":"ContainerDied","Data":"c907032572208dd6812cc396df335da1e7777a4629952a77f7f5318f0049766d"}
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.551540 4869 scope.go:117] "RemoveContainer" containerID="6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.574685 4869 scope.go:117] "RemoveContainer" containerID="a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.600641 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g9wt2"]
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.614940 4869 scope.go:117] "RemoveContainer" containerID="be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.615788 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g9wt2"]
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.629891 4869 scope.go:117] "RemoveContainer" containerID="6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c"
Jan 30 11:30:59 crc kubenswrapper[4869]: E0130 11:30:59.630355 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c\": container with ID starting with 6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c not found: ID does not exist" containerID="6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.630520 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c"} err="failed to get container status \"6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c\": rpc error: code = NotFound desc = could not find container \"6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c\": container with ID starting with 6672f5cfb161360630c7d7b14faac27b27cc60f052776d50ce3b62583da6332c not found: ID does not exist"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.630621 4869 scope.go:117] "RemoveContainer" containerID="a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076"
Jan 30 11:30:59 crc kubenswrapper[4869]: E0130 11:30:59.631150 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076\": container with ID starting with a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076 not found: ID does not exist" containerID="a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.631242 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076"} err="failed to get container status \"a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076\": rpc error: code = NotFound desc = could not find container \"a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076\": container with ID starting with a5dce1f3423ae3ebcfbdb0b902c92e174deaa014db797f92a9439f672804a076 not found: ID does not exist"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.631306 4869 scope.go:117] "RemoveContainer" containerID="be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048"
Jan 30 11:30:59 crc kubenswrapper[4869]: E0130 11:30:59.631639 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048\": container with ID starting with be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048 not found: ID does not exist" containerID="be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048"
Jan 30 11:30:59 crc kubenswrapper[4869]: I0130 11:30:59.631683 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048"} err="failed to get container status \"be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048\": rpc error: code = NotFound desc = could not find container \"be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048\": container with ID starting with be85b10e404cac1f90c7a7c3509868db1015b66055858d95bd9c502c6eb09048 not found: ID does not exist"
Jan 30 11:31:00 crc kubenswrapper[4869]: I0130 11:31:00.141642 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" path="/var/lib/kubelet/pods/e24286c1-22cb-405f-a601-bfc3417da8ed/volumes"
Jan 30 11:31:25 crc kubenswrapper[4869]: I0130 11:31:25.964824 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-29xct"]
Jan 30 11:31:25 crc kubenswrapper[4869]: E0130 11:31:25.965799 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="registry-server"
Jan 30 11:31:25 crc kubenswrapper[4869]: I0130 11:31:25.965817 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="registry-server"
Jan 30 11:31:25 crc kubenswrapper[4869]: E0130 11:31:25.965838 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="extract-utilities"
Jan 30 11:31:25 crc kubenswrapper[4869]: I0130 11:31:25.965849 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="extract-utilities"
Jan 30 11:31:25 crc kubenswrapper[4869]: E0130 11:31:25.965869 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="extract-content"
Jan 30 11:31:25 crc kubenswrapper[4869]: I0130 11:31:25.965876 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="extract-content"
Jan 30 11:31:25 crc kubenswrapper[4869]: I0130 11:31:25.966029 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e24286c1-22cb-405f-a601-bfc3417da8ed" containerName="registry-server"
Jan 30 11:31:25 crc kubenswrapper[4869]: I0130 11:31:25.968145 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:25 crc kubenswrapper[4869]: I0130 11:31:25.995163 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-29xct"]
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.105340 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-utilities\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.105435 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vkss\" (UniqueName: \"kubernetes.io/projected/85677f25-9105-41fe-84cb-d85703e54c57-kube-api-access-4vkss\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.105464 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-catalog-content\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.207636 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-utilities\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.207787 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vkss\" (UniqueName: \"kubernetes.io/projected/85677f25-9105-41fe-84cb-d85703e54c57-kube-api-access-4vkss\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.207813 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-catalog-content\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.208441 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-utilities\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.208478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-catalog-content\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct"
pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.228912 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vkss\" (UniqueName: \"kubernetes.io/projected/85677f25-9105-41fe-84cb-d85703e54c57-kube-api-access-4vkss\") pod \"redhat-marketplace-29xct\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.289177 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.712423 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-29xct"] Jan 30 11:31:26 crc kubenswrapper[4869]: I0130 11:31:26.734249 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29xct" event={"ID":"85677f25-9105-41fe-84cb-d85703e54c57","Type":"ContainerStarted","Data":"93215255ab556877dc65ad3e0a3c4dd36603d1a77fcc5fb24788711ea788dc1c"} Jan 30 11:31:27 crc kubenswrapper[4869]: I0130 11:31:27.741913 4869 generic.go:334] "Generic (PLEG): container finished" podID="85677f25-9105-41fe-84cb-d85703e54c57" containerID="263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59" exitCode=0 Jan 30 11:31:27 crc kubenswrapper[4869]: I0130 11:31:27.741992 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29xct" event={"ID":"85677f25-9105-41fe-84cb-d85703e54c57","Type":"ContainerDied","Data":"263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59"} Jan 30 11:31:28 crc kubenswrapper[4869]: I0130 11:31:28.752457 4869 generic.go:334] "Generic (PLEG): container finished" podID="85677f25-9105-41fe-84cb-d85703e54c57" containerID="c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e" exitCode=0 Jan 30 11:31:28 crc kubenswrapper[4869]: I0130 11:31:28.752790 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29xct" event={"ID":"85677f25-9105-41fe-84cb-d85703e54c57","Type":"ContainerDied","Data":"c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e"} Jan 30 11:31:29 crc kubenswrapper[4869]: I0130 11:31:29.761258 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29xct" event={"ID":"85677f25-9105-41fe-84cb-d85703e54c57","Type":"ContainerStarted","Data":"8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d"} Jan 30 11:31:29 crc kubenswrapper[4869]: I0130 11:31:29.776806 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-29xct" podStartSLOduration=3.35672774 podStartE2EDuration="4.776781607s" podCreationTimestamp="2026-01-30 11:31:25 +0000 UTC" firstStartedPulling="2026-01-30 11:31:27.743545021 +0000 UTC m=+2238.293421087" lastFinishedPulling="2026-01-30 11:31:29.163598888 +0000 UTC m=+2239.713474954" observedRunningTime="2026-01-30 11:31:29.776332894 +0000 UTC m=+2240.326208990" watchObservedRunningTime="2026-01-30 11:31:29.776781607 +0000 UTC m=+2240.326657673" Jan 30 11:31:36 crc kubenswrapper[4869]: I0130 11:31:36.289251 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:36 crc kubenswrapper[4869]: I0130 11:31:36.289788 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:36 crc kubenswrapper[4869]: I0130 11:31:36.334897 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:36 crc kubenswrapper[4869]: I0130 11:31:36.851124 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:36 crc kubenswrapper[4869]: I0130 11:31:36.896437 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-29xct"] Jan 30 11:31:38 crc kubenswrapper[4869]: I0130 11:31:38.820401 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-29xct" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="registry-server" containerID="cri-o://8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d" gracePeriod=2 Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.193801 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.307049 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-utilities\") pod \"85677f25-9105-41fe-84cb-d85703e54c57\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.307182 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vkss\" (UniqueName: \"kubernetes.io/projected/85677f25-9105-41fe-84cb-d85703e54c57-kube-api-access-4vkss\") pod \"85677f25-9105-41fe-84cb-d85703e54c57\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.307407 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-catalog-content\") pod \"85677f25-9105-41fe-84cb-d85703e54c57\" (UID: \"85677f25-9105-41fe-84cb-d85703e54c57\") " Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.311777 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-utilities" (OuterVolumeSpecName: "utilities") pod "85677f25-9105-41fe-84cb-d85703e54c57" (UID: "85677f25-9105-41fe-84cb-d85703e54c57"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.317938 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85677f25-9105-41fe-84cb-d85703e54c57-kube-api-access-4vkss" (OuterVolumeSpecName: "kube-api-access-4vkss") pod "85677f25-9105-41fe-84cb-d85703e54c57" (UID: "85677f25-9105-41fe-84cb-d85703e54c57"). InnerVolumeSpecName "kube-api-access-4vkss". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.334085 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "85677f25-9105-41fe-84cb-d85703e54c57" (UID: "85677f25-9105-41fe-84cb-d85703e54c57"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.409552 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vkss\" (UniqueName: \"kubernetes.io/projected/85677f25-9105-41fe-84cb-d85703e54c57-kube-api-access-4vkss\") on node \"crc\" DevicePath \"\"" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.409588 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.409599 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85677f25-9105-41fe-84cb-d85703e54c57-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.829308 4869 generic.go:334] "Generic (PLEG): container finished" podID="85677f25-9105-41fe-84cb-d85703e54c57" containerID="8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d" exitCode=0 Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.829367 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29xct" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.829367 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29xct" event={"ID":"85677f25-9105-41fe-84cb-d85703e54c57","Type":"ContainerDied","Data":"8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d"} Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.829409 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29xct" event={"ID":"85677f25-9105-41fe-84cb-d85703e54c57","Type":"ContainerDied","Data":"93215255ab556877dc65ad3e0a3c4dd36603d1a77fcc5fb24788711ea788dc1c"} Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.829433 4869 scope.go:117] "RemoveContainer" containerID="8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.846389 4869 scope.go:117] "RemoveContainer" containerID="c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.862323 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-29xct"] Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.867693 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-29xct"] Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.880976 4869 scope.go:117] "RemoveContainer" containerID="263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.896107 4869 scope.go:117] "RemoveContainer" containerID="8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d" Jan 30 11:31:39 crc kubenswrapper[4869]: E0130 11:31:39.896577 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d\": container with ID starting with 8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d not found: ID does not exist" containerID="8f17cb32bfb21c9620ec7c31fd35834c62cd0c0b622c3109510e6094744ebe6d" Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.896616 4869 
Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.896639 4869 scope.go:117] "RemoveContainer" containerID="c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e"
Jan 30 11:31:39 crc kubenswrapper[4869]: E0130 11:31:39.896966 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e\": container with ID starting with c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e not found: ID does not exist" containerID="c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e"
Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.897013 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e"} err="failed to get container status \"c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e\": rpc error: code = NotFound desc = could not find container \"c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e\": container with ID starting with c45ef1140a81bec234f1677fb61fd8f8983e27918bc84c2bdfe0321a9cf89a4e not found: ID does not exist"
Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.897045 4869 scope.go:117] "RemoveContainer" containerID="263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59"
Jan 30 11:31:39 crc kubenswrapper[4869]: E0130 11:31:39.898044 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59\": container with ID starting with 263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59 not found: ID does not exist" containerID="263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59"
Jan 30 11:31:39 crc kubenswrapper[4869]: I0130 11:31:39.898075 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59"} err="failed to get container status \"263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59\": rpc error: code = NotFound desc = could not find container \"263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59\": container with ID starting with 263a6713da2aaebb343013759e8d129a8e7cf2d1d1681546228ef2798308df59 not found: ID does not exist"
Jan 30 11:31:40 crc kubenswrapper[4869]: I0130 11:31:40.146654 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85677f25-9105-41fe-84cb-d85703e54c57" path="/var/lib/kubelet/pods/85677f25-9105-41fe-84cb-d85703e54c57/volumes"
Jan 30 11:33:21 crc kubenswrapper[4869]: I0130 11:33:21.769501 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:33:21 crc kubenswrapper[4869]: I0130 11:33:21.770122 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:33:51 crc kubenswrapper[4869]: I0130 11:33:51.769118 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:33:51 crc kubenswrapper[4869]: I0130 11:33:51.769738 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.823918 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h2jqp"]
Jan 30 11:33:59 crc kubenswrapper[4869]: E0130 11:33:59.824881 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="registry-server"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.824899 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="registry-server"
Jan 30 11:33:59 crc kubenswrapper[4869]: E0130 11:33:59.824915 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="extract-content"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.824922 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="extract-content"
Jan 30 11:33:59 crc kubenswrapper[4869]: E0130 11:33:59.824940 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="extract-utilities"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.824947 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="extract-utilities"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.825097 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="85677f25-9105-41fe-84cb-d85703e54c57" containerName="registry-server"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.826095 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.839863 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h2jqp"]
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.908420 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-catalog-content\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.908486 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-utilities\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:33:59 crc kubenswrapper[4869]: I0130 11:33:59.908516 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjsqp\" (UniqueName: \"kubernetes.io/projected/576800d7-526c-45d9-b03b-5562a7f80352-kube-api-access-zjsqp\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.010371 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-catalog-content\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.010444 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-utilities\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.010488 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjsqp\" (UniqueName: \"kubernetes.io/projected/576800d7-526c-45d9-b03b-5562a7f80352-kube-api-access-zjsqp\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.011020 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-catalog-content\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.011075 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-utilities\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.037292 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjsqp\" (UniqueName: \"kubernetes.io/projected/576800d7-526c-45d9-b03b-5562a7f80352-kube-api-access-zjsqp\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp"
\"kube-api-access-zjsqp\" (UniqueName: \"kubernetes.io/projected/576800d7-526c-45d9-b03b-5562a7f80352-kube-api-access-zjsqp\") pod \"redhat-operators-h2jqp\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.146040 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.375554 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h2jqp"] Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.758162 4869 generic.go:334] "Generic (PLEG): container finished" podID="576800d7-526c-45d9-b03b-5562a7f80352" containerID="128752bd6962230ec3a3c35e9fe64026596b8d0dc06428bd21deab4133f36fbe" exitCode=0 Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.758220 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2jqp" event={"ID":"576800d7-526c-45d9-b03b-5562a7f80352","Type":"ContainerDied","Data":"128752bd6962230ec3a3c35e9fe64026596b8d0dc06428bd21deab4133f36fbe"} Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.758260 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2jqp" event={"ID":"576800d7-526c-45d9-b03b-5562a7f80352","Type":"ContainerStarted","Data":"71a7a9e7473c4555d36f8d500d414f06f9854b3dfbedf2dd4171f18b3bcd6e00"} Jan 30 11:34:00 crc kubenswrapper[4869]: I0130 11:34:00.760379 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 11:34:01 crc kubenswrapper[4869]: I0130 11:34:01.768067 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2jqp" event={"ID":"576800d7-526c-45d9-b03b-5562a7f80352","Type":"ContainerStarted","Data":"c6fcdd013006ae11eb63bb499a4cb17a7a03003ad343f992448c94e782b9f401"} Jan 30 11:34:02 crc kubenswrapper[4869]: I0130 11:34:02.777284 4869 generic.go:334] "Generic (PLEG): container finished" podID="576800d7-526c-45d9-b03b-5562a7f80352" containerID="c6fcdd013006ae11eb63bb499a4cb17a7a03003ad343f992448c94e782b9f401" exitCode=0 Jan 30 11:34:02 crc kubenswrapper[4869]: I0130 11:34:02.777330 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2jqp" event={"ID":"576800d7-526c-45d9-b03b-5562a7f80352","Type":"ContainerDied","Data":"c6fcdd013006ae11eb63bb499a4cb17a7a03003ad343f992448c94e782b9f401"} Jan 30 11:34:03 crc kubenswrapper[4869]: I0130 11:34:03.785083 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2jqp" event={"ID":"576800d7-526c-45d9-b03b-5562a7f80352","Type":"ContainerStarted","Data":"054c161acf7c2d377436b62b0a5c5a27dbe1fd6fc92ce3e57c4c0d226d53849f"} Jan 30 11:34:03 crc kubenswrapper[4869]: I0130 11:34:03.803859 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h2jqp" podStartSLOduration=2.139145958 podStartE2EDuration="4.803840039s" podCreationTimestamp="2026-01-30 11:33:59 +0000 UTC" firstStartedPulling="2026-01-30 11:34:00.760096645 +0000 UTC m=+2391.309972711" lastFinishedPulling="2026-01-30 11:34:03.424790716 +0000 UTC m=+2393.974666792" observedRunningTime="2026-01-30 11:34:03.802681606 +0000 UTC m=+2394.352557672" watchObservedRunningTime="2026-01-30 11:34:03.803840039 +0000 UTC m=+2394.353716115" Jan 30 11:34:10 crc 
kubenswrapper[4869]: I0130 11:34:10.146671 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:10 crc kubenswrapper[4869]: I0130 11:34:10.147170 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:10 crc kubenswrapper[4869]: I0130 11:34:10.192691 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:10 crc kubenswrapper[4869]: I0130 11:34:10.880663 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:10 crc kubenswrapper[4869]: I0130 11:34:10.922936 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h2jqp"] Jan 30 11:34:12 crc kubenswrapper[4869]: I0130 11:34:12.847837 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h2jqp" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="registry-server" containerID="cri-o://054c161acf7c2d377436b62b0a5c5a27dbe1fd6fc92ce3e57c4c0d226d53849f" gracePeriod=2 Jan 30 11:34:14 crc kubenswrapper[4869]: I0130 11:34:14.868416 4869 generic.go:334] "Generic (PLEG): container finished" podID="576800d7-526c-45d9-b03b-5562a7f80352" containerID="054c161acf7c2d377436b62b0a5c5a27dbe1fd6fc92ce3e57c4c0d226d53849f" exitCode=0 Jan 30 11:34:14 crc kubenswrapper[4869]: I0130 11:34:14.868501 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2jqp" event={"ID":"576800d7-526c-45d9-b03b-5562a7f80352","Type":"ContainerDied","Data":"054c161acf7c2d377436b62b0a5c5a27dbe1fd6fc92ce3e57c4c0d226d53849f"} Jan 30 11:34:15 crc kubenswrapper[4869]: I0130 11:34:15.876462 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2jqp" event={"ID":"576800d7-526c-45d9-b03b-5562a7f80352","Type":"ContainerDied","Data":"71a7a9e7473c4555d36f8d500d414f06f9854b3dfbedf2dd4171f18b3bcd6e00"} Jan 30 11:34:15 crc kubenswrapper[4869]: I0130 11:34:15.876729 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71a7a9e7473c4555d36f8d500d414f06f9854b3dfbedf2dd4171f18b3bcd6e00" Jan 30 11:34:15 crc kubenswrapper[4869]: I0130 11:34:15.926309 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.039196 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-catalog-content\") pod \"576800d7-526c-45d9-b03b-5562a7f80352\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.039294 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjsqp\" (UniqueName: \"kubernetes.io/projected/576800d7-526c-45d9-b03b-5562a7f80352-kube-api-access-zjsqp\") pod \"576800d7-526c-45d9-b03b-5562a7f80352\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.039350 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-utilities\") pod \"576800d7-526c-45d9-b03b-5562a7f80352\" (UID: \"576800d7-526c-45d9-b03b-5562a7f80352\") " Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.040544 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-utilities" (OuterVolumeSpecName: "utilities") pod "576800d7-526c-45d9-b03b-5562a7f80352" (UID: "576800d7-526c-45d9-b03b-5562a7f80352"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.045160 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/576800d7-526c-45d9-b03b-5562a7f80352-kube-api-access-zjsqp" (OuterVolumeSpecName: "kube-api-access-zjsqp") pod "576800d7-526c-45d9-b03b-5562a7f80352" (UID: "576800d7-526c-45d9-b03b-5562a7f80352"). InnerVolumeSpecName "kube-api-access-zjsqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.140862 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.140901 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjsqp\" (UniqueName: \"kubernetes.io/projected/576800d7-526c-45d9-b03b-5562a7f80352-kube-api-access-zjsqp\") on node \"crc\" DevicePath \"\"" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.177666 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "576800d7-526c-45d9-b03b-5562a7f80352" (UID: "576800d7-526c-45d9-b03b-5562a7f80352"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.241806 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576800d7-526c-45d9-b03b-5562a7f80352-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.897414 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h2jqp" Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.931537 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h2jqp"] Jan 30 11:34:16 crc kubenswrapper[4869]: I0130 11:34:16.937672 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h2jqp"] Jan 30 11:34:18 crc kubenswrapper[4869]: I0130 11:34:18.142070 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="576800d7-526c-45d9-b03b-5562a7f80352" path="/var/lib/kubelet/pods/576800d7-526c-45d9-b03b-5562a7f80352/volumes" Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.768914 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.769260 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.769313 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.770852 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.770963 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" gracePeriod=600 Jan 30 11:34:21 crc kubenswrapper[4869]: E0130 11:34:21.892136 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.937197 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" exitCode=0 Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.937243 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee"} Jan 30 11:34:21 crc 
kubenswrapper[4869]: I0130 11:34:21.937282 4869 scope.go:117] "RemoveContainer" containerID="463507ab061d257eb1b3adce78dea16b402a8685a0d66911924501e5413965d1" Jan 30 11:34:21 crc kubenswrapper[4869]: I0130 11:34:21.937904 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:34:21 crc kubenswrapper[4869]: E0130 11:34:21.938205 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:34:35 crc kubenswrapper[4869]: I0130 11:34:35.132738 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:34:35 crc kubenswrapper[4869]: E0130 11:34:35.133391 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:34:49 crc kubenswrapper[4869]: I0130 11:34:49.133371 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:34:49 crc kubenswrapper[4869]: E0130 11:34:49.134139 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:35:02 crc kubenswrapper[4869]: I0130 11:35:02.132867 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:35:02 crc kubenswrapper[4869]: E0130 11:35:02.133526 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:35:14 crc kubenswrapper[4869]: I0130 11:35:14.133756 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:35:14 crc kubenswrapper[4869]: E0130 11:35:14.134485 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:35:29 crc 
kubenswrapper[4869]: I0130 11:35:29.133636 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:35:29 crc kubenswrapper[4869]: E0130 11:35:29.137482 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:35:40 crc kubenswrapper[4869]: I0130 11:35:40.140904 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:35:40 crc kubenswrapper[4869]: E0130 11:35:40.141907 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:35:53 crc kubenswrapper[4869]: I0130 11:35:53.133135 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:35:53 crc kubenswrapper[4869]: E0130 11:35:53.134235 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:36:05 crc kubenswrapper[4869]: I0130 11:36:05.132392 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:36:05 crc kubenswrapper[4869]: E0130 11:36:05.133151 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:36:18 crc kubenswrapper[4869]: I0130 11:36:18.132552 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:36:18 crc kubenswrapper[4869]: E0130 11:36:18.133295 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:36:32 crc kubenswrapper[4869]: I0130 11:36:32.140547 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:36:32 crc 
kubenswrapper[4869]: E0130 11:36:32.141234 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:36:47 crc kubenswrapper[4869]: I0130 11:36:47.133368 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:36:47 crc kubenswrapper[4869]: E0130 11:36:47.134100 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:37:01 crc kubenswrapper[4869]: I0130 11:37:01.133393 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:37:01 crc kubenswrapper[4869]: E0130 11:37:01.134060 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:37:15 crc kubenswrapper[4869]: I0130 11:37:15.133574 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:37:15 crc kubenswrapper[4869]: E0130 11:37:15.134318 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:37:28 crc kubenswrapper[4869]: I0130 11:37:28.133303 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:37:28 crc kubenswrapper[4869]: E0130 11:37:28.134524 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:37:39 crc kubenswrapper[4869]: I0130 11:37:39.132820 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:37:39 crc kubenswrapper[4869]: E0130 11:37:39.133464 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:37:52 crc kubenswrapper[4869]: I0130 11:37:52.133237 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:37:52 crc kubenswrapper[4869]: E0130 11:37:52.133964 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.280259 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2xfk7"] Jan 30 11:38:01 crc kubenswrapper[4869]: E0130 11:38:01.281466 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="registry-server" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.281484 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="registry-server" Jan 30 11:38:01 crc kubenswrapper[4869]: E0130 11:38:01.281515 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="extract-content" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.281525 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="extract-content" Jan 30 11:38:01 crc kubenswrapper[4869]: E0130 11:38:01.281551 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="extract-utilities" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.281560 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="extract-utilities" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.281766 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="576800d7-526c-45d9-b03b-5562a7f80352" containerName="registry-server" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.283246 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.296004 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2xfk7"] Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.340431 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-utilities\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.340828 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdmgw\" (UniqueName: \"kubernetes.io/projected/3ed1edc5-1c61-4871-a7f4-ecd12828de65-kube-api-access-cdmgw\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.340888 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-catalog-content\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.442752 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-utilities\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.443110 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdmgw\" (UniqueName: \"kubernetes.io/projected/3ed1edc5-1c61-4871-a7f4-ecd12828de65-kube-api-access-cdmgw\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.443613 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-catalog-content\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.443324 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-utilities\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.444140 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-catalog-content\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.461951 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cdmgw\" (UniqueName: \"kubernetes.io/projected/3ed1edc5-1c61-4871-a7f4-ecd12828de65-kube-api-access-cdmgw\") pod \"community-operators-2xfk7\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.614967 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:01 crc kubenswrapper[4869]: I0130 11:38:01.963468 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2xfk7"] Jan 30 11:38:01 crc kubenswrapper[4869]: W0130 11:38:01.971751 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ed1edc5_1c61_4871_a7f4_ecd12828de65.slice/crio-7d143d7145f36829518082ac77221326f765dcb5ae27ac4327b010badb540b0f WatchSource:0}: Error finding container 7d143d7145f36829518082ac77221326f765dcb5ae27ac4327b010badb540b0f: Status 404 returned error can't find the container with id 7d143d7145f36829518082ac77221326f765dcb5ae27ac4327b010badb540b0f Jan 30 11:38:02 crc kubenswrapper[4869]: I0130 11:38:02.463569 4869 generic.go:334] "Generic (PLEG): container finished" podID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerID="b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a" exitCode=0 Jan 30 11:38:02 crc kubenswrapper[4869]: I0130 11:38:02.463627 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xfk7" event={"ID":"3ed1edc5-1c61-4871-a7f4-ecd12828de65","Type":"ContainerDied","Data":"b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a"} Jan 30 11:38:02 crc kubenswrapper[4869]: I0130 11:38:02.463657 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xfk7" event={"ID":"3ed1edc5-1c61-4871-a7f4-ecd12828de65","Type":"ContainerStarted","Data":"7d143d7145f36829518082ac77221326f765dcb5ae27ac4327b010badb540b0f"} Jan 30 11:38:03 crc kubenswrapper[4869]: I0130 11:38:03.475641 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xfk7" event={"ID":"3ed1edc5-1c61-4871-a7f4-ecd12828de65","Type":"ContainerStarted","Data":"0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45"} Jan 30 11:38:04 crc kubenswrapper[4869]: I0130 11:38:04.483451 4869 generic.go:334] "Generic (PLEG): container finished" podID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerID="0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45" exitCode=0 Jan 30 11:38:04 crc kubenswrapper[4869]: I0130 11:38:04.483520 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xfk7" event={"ID":"3ed1edc5-1c61-4871-a7f4-ecd12828de65","Type":"ContainerDied","Data":"0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45"} Jan 30 11:38:05 crc kubenswrapper[4869]: I0130 11:38:05.493150 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xfk7" event={"ID":"3ed1edc5-1c61-4871-a7f4-ecd12828de65","Type":"ContainerStarted","Data":"db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19"} Jan 30 11:38:05 crc kubenswrapper[4869]: I0130 11:38:05.523223 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2xfk7" 
podStartSLOduration=2.092223158 podStartE2EDuration="4.523203826s" podCreationTimestamp="2026-01-30 11:38:01 +0000 UTC" firstStartedPulling="2026-01-30 11:38:02.465750253 +0000 UTC m=+2633.015626319" lastFinishedPulling="2026-01-30 11:38:04.896730921 +0000 UTC m=+2635.446606987" observedRunningTime="2026-01-30 11:38:05.518345158 +0000 UTC m=+2636.068221224" watchObservedRunningTime="2026-01-30 11:38:05.523203826 +0000 UTC m=+2636.073079892" Jan 30 11:38:07 crc kubenswrapper[4869]: I0130 11:38:07.133218 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:38:07 crc kubenswrapper[4869]: E0130 11:38:07.133597 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:38:11 crc kubenswrapper[4869]: I0130 11:38:11.616374 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:11 crc kubenswrapper[4869]: I0130 11:38:11.616704 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:11 crc kubenswrapper[4869]: I0130 11:38:11.676383 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:12 crc kubenswrapper[4869]: I0130 11:38:12.593277 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:12 crc kubenswrapper[4869]: I0130 11:38:12.637815 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2xfk7"] Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.563852 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2xfk7" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="registry-server" containerID="cri-o://db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19" gracePeriod=2 Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.921427 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.932267 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdmgw\" (UniqueName: \"kubernetes.io/projected/3ed1edc5-1c61-4871-a7f4-ecd12828de65-kube-api-access-cdmgw\") pod \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.932358 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-catalog-content\") pod \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.932516 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-utilities\") pod \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\" (UID: \"3ed1edc5-1c61-4871-a7f4-ecd12828de65\") " Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.933846 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-utilities" (OuterVolumeSpecName: "utilities") pod "3ed1edc5-1c61-4871-a7f4-ecd12828de65" (UID: "3ed1edc5-1c61-4871-a7f4-ecd12828de65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.942326 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ed1edc5-1c61-4871-a7f4-ecd12828de65-kube-api-access-cdmgw" (OuterVolumeSpecName: "kube-api-access-cdmgw") pod "3ed1edc5-1c61-4871-a7f4-ecd12828de65" (UID: "3ed1edc5-1c61-4871-a7f4-ecd12828de65"). InnerVolumeSpecName "kube-api-access-cdmgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:38:14 crc kubenswrapper[4869]: I0130 11:38:14.997107 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ed1edc5-1c61-4871-a7f4-ecd12828de65" (UID: "3ed1edc5-1c61-4871-a7f4-ecd12828de65"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.033240 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdmgw\" (UniqueName: \"kubernetes.io/projected/3ed1edc5-1c61-4871-a7f4-ecd12828de65-kube-api-access-cdmgw\") on node \"crc\" DevicePath \"\"" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.033481 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.033543 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ed1edc5-1c61-4871-a7f4-ecd12828de65-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.574694 4869 generic.go:334] "Generic (PLEG): container finished" podID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerID="db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19" exitCode=0 Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.574770 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2xfk7" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.574797 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xfk7" event={"ID":"3ed1edc5-1c61-4871-a7f4-ecd12828de65","Type":"ContainerDied","Data":"db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19"} Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.575453 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xfk7" event={"ID":"3ed1edc5-1c61-4871-a7f4-ecd12828de65","Type":"ContainerDied","Data":"7d143d7145f36829518082ac77221326f765dcb5ae27ac4327b010badb540b0f"} Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.575492 4869 scope.go:117] "RemoveContainer" containerID="db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.619898 4869 scope.go:117] "RemoveContainer" containerID="0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.624482 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2xfk7"] Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.632867 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2xfk7"] Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.651461 4869 scope.go:117] "RemoveContainer" containerID="b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.674476 4869 scope.go:117] "RemoveContainer" containerID="db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19" Jan 30 11:38:15 crc kubenswrapper[4869]: E0130 11:38:15.675026 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19\": container with ID starting with db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19 not found: ID does not exist" containerID="db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.675059 
4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19"} err="failed to get container status \"db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19\": rpc error: code = NotFound desc = could not find container \"db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19\": container with ID starting with db699dda1cdadd3b7fe8425d5c619fc1b43a400ebb1922d1a92b4e29099b4c19 not found: ID does not exist" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.675087 4869 scope.go:117] "RemoveContainer" containerID="0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45" Jan 30 11:38:15 crc kubenswrapper[4869]: E0130 11:38:15.675293 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45\": container with ID starting with 0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45 not found: ID does not exist" containerID="0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.675308 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45"} err="failed to get container status \"0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45\": rpc error: code = NotFound desc = could not find container \"0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45\": container with ID starting with 0448c4a07cf67b5ca17aefa0df905f6d8cefa9ac62bedd6bd324fb3fdb025f45 not found: ID does not exist" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.675321 4869 scope.go:117] "RemoveContainer" containerID="b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a" Jan 30 11:38:15 crc kubenswrapper[4869]: E0130 11:38:15.675590 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a\": container with ID starting with b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a not found: ID does not exist" containerID="b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a" Jan 30 11:38:15 crc kubenswrapper[4869]: I0130 11:38:15.675618 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a"} err="failed to get container status \"b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a\": rpc error: code = NotFound desc = could not find container \"b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a\": container with ID starting with b23fc9381d4e1a32e70e05de37496237cad41f0beaa754606c62e57f8250430a not found: ID does not exist" Jan 30 11:38:16 crc kubenswrapper[4869]: I0130 11:38:16.145574 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" path="/var/lib/kubelet/pods/3ed1edc5-1c61-4871-a7f4-ecd12828de65/volumes" Jan 30 11:38:21 crc kubenswrapper[4869]: I0130 11:38:21.133066 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:38:21 crc kubenswrapper[4869]: E0130 11:38:21.133419 4869 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:38:32 crc kubenswrapper[4869]: I0130 11:38:32.133410 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:38:32 crc kubenswrapper[4869]: E0130 11:38:32.134107 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:38:44 crc kubenswrapper[4869]: I0130 11:38:44.133225 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:38:44 crc kubenswrapper[4869]: E0130 11:38:44.134047 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:38:56 crc kubenswrapper[4869]: I0130 11:38:56.134660 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:38:56 crc kubenswrapper[4869]: E0130 11:38:56.135505 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:39:08 crc kubenswrapper[4869]: I0130 11:39:08.133125 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:39:08 crc kubenswrapper[4869]: E0130 11:39:08.133892 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:39:19 crc kubenswrapper[4869]: I0130 11:39:19.133387 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:39:19 crc kubenswrapper[4869]: E0130 11:39:19.134169 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:39:30 crc kubenswrapper[4869]: I0130 11:39:30.136799 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:39:31 crc kubenswrapper[4869]: I0130 11:39:31.052964 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"11e487505cedf864c909a4aa93c760d70f9e55412d7ffbfe015dd2f6c2562247"} Jan 30 11:40:14 crc kubenswrapper[4869]: I0130 11:40:14.964954 4869 scope.go:117] "RemoveContainer" containerID="054c161acf7c2d377436b62b0a5c5a27dbe1fd6fc92ce3e57c4c0d226d53849f" Jan 30 11:40:14 crc kubenswrapper[4869]: I0130 11:40:14.982196 4869 scope.go:117] "RemoveContainer" containerID="128752bd6962230ec3a3c35e9fe64026596b8d0dc06428bd21deab4133f36fbe" Jan 30 11:40:14 crc kubenswrapper[4869]: I0130 11:40:14.998261 4869 scope.go:117] "RemoveContainer" containerID="c6fcdd013006ae11eb63bb499a4cb17a7a03003ad343f992448c94e782b9f401" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.103005 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jkm94"] Jan 30 11:41:51 crc kubenswrapper[4869]: E0130 11:41:51.103954 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="registry-server" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.103971 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="registry-server" Jan 30 11:41:51 crc kubenswrapper[4869]: E0130 11:41:51.103985 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="extract-content" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.103995 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="extract-content" Jan 30 11:41:51 crc kubenswrapper[4869]: E0130 11:41:51.104020 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="extract-utilities" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.104029 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="extract-utilities" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.104219 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ed1edc5-1c61-4871-a7f4-ecd12828de65" containerName="registry-server" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.105417 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.109119 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkm94"] Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.203110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98qxs\" (UniqueName: \"kubernetes.io/projected/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-kube-api-access-98qxs\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.203192 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-catalog-content\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.203222 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-utilities\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.304637 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98qxs\" (UniqueName: \"kubernetes.io/projected/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-kube-api-access-98qxs\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.304691 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-catalog-content\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.304775 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-utilities\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.305355 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-utilities\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.305412 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-catalog-content\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.325127 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-98qxs\" (UniqueName: \"kubernetes.io/projected/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-kube-api-access-98qxs\") pod \"certified-operators-jkm94\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.432244 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.691348 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkm94"] Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.769002 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:41:51 crc kubenswrapper[4869]: I0130 11:41:51.769059 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.091669 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hlkq2"] Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.092890 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.103334 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hlkq2"] Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.222377 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-catalog-content\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.222480 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghlk5\" (UniqueName: \"kubernetes.io/projected/86bfe561-d583-4254-a4d5-89207efbeaf3-kube-api-access-ghlk5\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.222564 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-utilities\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.238134 4869 generic.go:334] "Generic (PLEG): container finished" podID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerID="8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167" exitCode=0 Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.238185 4869 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-jkm94" event={"ID":"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5","Type":"ContainerDied","Data":"8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167"} Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.238217 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkm94" event={"ID":"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5","Type":"ContainerStarted","Data":"b407e801324706a708f4cdcc0fc447bfea900ebc55d4a695265459e94ab275ea"} Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.240086 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.323664 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-utilities\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.323797 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-catalog-content\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.323838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghlk5\" (UniqueName: \"kubernetes.io/projected/86bfe561-d583-4254-a4d5-89207efbeaf3-kube-api-access-ghlk5\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.324195 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-utilities\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.324394 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-catalog-content\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.350318 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghlk5\" (UniqueName: \"kubernetes.io/projected/86bfe561-d583-4254-a4d5-89207efbeaf3-kube-api-access-ghlk5\") pod \"redhat-marketplace-hlkq2\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.421263 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:41:52 crc kubenswrapper[4869]: I0130 11:41:52.706406 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hlkq2"] Jan 30 11:41:52 crc kubenswrapper[4869]: W0130 11:41:52.712882 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86bfe561_d583_4254_a4d5_89207efbeaf3.slice/crio-5566faa94fd93fa3e988e92a269594dc8053cf0c6cc662d419e630469105cb82 WatchSource:0}: Error finding container 5566faa94fd93fa3e988e92a269594dc8053cf0c6cc662d419e630469105cb82: Status 404 returned error can't find the container with id 5566faa94fd93fa3e988e92a269594dc8053cf0c6cc662d419e630469105cb82 Jan 30 11:41:53 crc kubenswrapper[4869]: I0130 11:41:53.247212 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkm94" event={"ID":"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5","Type":"ContainerStarted","Data":"df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb"} Jan 30 11:41:53 crc kubenswrapper[4869]: I0130 11:41:53.249905 4869 generic.go:334] "Generic (PLEG): container finished" podID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerID="5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8" exitCode=0 Jan 30 11:41:53 crc kubenswrapper[4869]: I0130 11:41:53.249933 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hlkq2" event={"ID":"86bfe561-d583-4254-a4d5-89207efbeaf3","Type":"ContainerDied","Data":"5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8"} Jan 30 11:41:53 crc kubenswrapper[4869]: I0130 11:41:53.249949 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hlkq2" event={"ID":"86bfe561-d583-4254-a4d5-89207efbeaf3","Type":"ContainerStarted","Data":"5566faa94fd93fa3e988e92a269594dc8053cf0c6cc662d419e630469105cb82"} Jan 30 11:41:54 crc kubenswrapper[4869]: I0130 11:41:54.258098 4869 generic.go:334] "Generic (PLEG): container finished" podID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerID="5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc" exitCode=0 Jan 30 11:41:54 crc kubenswrapper[4869]: I0130 11:41:54.258155 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hlkq2" event={"ID":"86bfe561-d583-4254-a4d5-89207efbeaf3","Type":"ContainerDied","Data":"5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc"} Jan 30 11:41:54 crc kubenswrapper[4869]: I0130 11:41:54.263203 4869 generic.go:334] "Generic (PLEG): container finished" podID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerID="df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb" exitCode=0 Jan 30 11:41:54 crc kubenswrapper[4869]: I0130 11:41:54.263238 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkm94" event={"ID":"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5","Type":"ContainerDied","Data":"df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb"} Jan 30 11:41:55 crc kubenswrapper[4869]: I0130 11:41:55.272767 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkm94" event={"ID":"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5","Type":"ContainerStarted","Data":"43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc"} Jan 30 11:41:55 crc kubenswrapper[4869]: I0130 11:41:55.275341 
4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hlkq2" event={"ID":"86bfe561-d583-4254-a4d5-89207efbeaf3","Type":"ContainerStarted","Data":"167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4"} Jan 30 11:41:55 crc kubenswrapper[4869]: I0130 11:41:55.306844 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jkm94" podStartSLOduration=1.887680035 podStartE2EDuration="4.30682215s" podCreationTimestamp="2026-01-30 11:41:51 +0000 UTC" firstStartedPulling="2026-01-30 11:41:52.239865396 +0000 UTC m=+2862.789741462" lastFinishedPulling="2026-01-30 11:41:54.659007511 +0000 UTC m=+2865.208883577" observedRunningTime="2026-01-30 11:41:55.3011875 +0000 UTC m=+2865.851063576" watchObservedRunningTime="2026-01-30 11:41:55.30682215 +0000 UTC m=+2865.856698216" Jan 30 11:42:01 crc kubenswrapper[4869]: I0130 11:42:01.433390 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:42:01 crc kubenswrapper[4869]: I0130 11:42:01.433989 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:42:01 crc kubenswrapper[4869]: I0130 11:42:01.481862 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:42:01 crc kubenswrapper[4869]: I0130 11:42:01.508463 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hlkq2" podStartSLOduration=8.134750896 podStartE2EDuration="9.508438617s" podCreationTimestamp="2026-01-30 11:41:52 +0000 UTC" firstStartedPulling="2026-01-30 11:41:53.251340856 +0000 UTC m=+2863.801216922" lastFinishedPulling="2026-01-30 11:41:54.625028577 +0000 UTC m=+2865.174904643" observedRunningTime="2026-01-30 11:41:55.329322788 +0000 UTC m=+2865.879198874" watchObservedRunningTime="2026-01-30 11:42:01.508438617 +0000 UTC m=+2872.058314693" Jan 30 11:42:02 crc kubenswrapper[4869]: I0130 11:42:02.384144 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:42:02 crc kubenswrapper[4869]: I0130 11:42:02.422591 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:42:02 crc kubenswrapper[4869]: I0130 11:42:02.422675 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:42:02 crc kubenswrapper[4869]: I0130 11:42:02.444068 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkm94"] Jan 30 11:42:02 crc kubenswrapper[4869]: I0130 11:42:02.485695 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:42:03 crc kubenswrapper[4869]: I0130 11:42:03.367217 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.112531 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hlkq2"] Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.337362 4869 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-jkm94" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="registry-server" containerID="cri-o://43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc" gracePeriod=2 Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.706937 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.835163 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-utilities\") pod \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.835268 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98qxs\" (UniqueName: \"kubernetes.io/projected/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-kube-api-access-98qxs\") pod \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.835377 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-catalog-content\") pod \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\" (UID: \"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5\") " Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.836400 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-utilities" (OuterVolumeSpecName: "utilities") pod "2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" (UID: "2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.843435 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-kube-api-access-98qxs" (OuterVolumeSpecName: "kube-api-access-98qxs") pod "2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" (UID: "2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5"). InnerVolumeSpecName "kube-api-access-98qxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.892702 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" (UID: "2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.936475 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.936516 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:42:04 crc kubenswrapper[4869]: I0130 11:42:04.936526 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98qxs\" (UniqueName: \"kubernetes.io/projected/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5-kube-api-access-98qxs\") on node \"crc\" DevicePath \"\"" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.346267 4869 generic.go:334] "Generic (PLEG): container finished" podID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerID="43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc" exitCode=0 Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.346320 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkm94" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.346341 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkm94" event={"ID":"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5","Type":"ContainerDied","Data":"43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc"} Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.346397 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkm94" event={"ID":"2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5","Type":"ContainerDied","Data":"b407e801324706a708f4cdcc0fc447bfea900ebc55d4a695265459e94ab275ea"} Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.346418 4869 scope.go:117] "RemoveContainer" containerID="43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.346759 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hlkq2" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="registry-server" containerID="cri-o://167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4" gracePeriod=2 Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.367368 4869 scope.go:117] "RemoveContainer" containerID="df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.388883 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkm94"] Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.393558 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jkm94"] Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.422928 4869 scope.go:117] "RemoveContainer" containerID="8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.476549 4869 scope.go:117] "RemoveContainer" containerID="43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc" Jan 30 11:42:05 crc kubenswrapper[4869]: E0130 11:42:05.476954 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc\": container with ID starting with 43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc not found: ID does not exist" containerID="43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.476982 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc"} err="failed to get container status \"43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc\": rpc error: code = NotFound desc = could not find container \"43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc\": container with ID starting with 43162f2a449f87ac67b01f3bb4f0c8f6ae10a7dba9531d1d972b28a9aad9b1dc not found: ID does not exist" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.477006 4869 scope.go:117] "RemoveContainer" containerID="df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb" Jan 30 11:42:05 crc kubenswrapper[4869]: E0130 11:42:05.477196 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb\": container with ID starting with df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb not found: ID does not exist" containerID="df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.477241 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb"} err="failed to get container status \"df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb\": rpc error: code = NotFound desc = could not find container \"df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb\": container with ID starting with df03698f45b607bc7b47c9d6c547396833626c10aa2302a85f9bad83753c30bb not found: ID does not exist" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.477254 4869 scope.go:117] "RemoveContainer" containerID="8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167" Jan 30 11:42:05 crc kubenswrapper[4869]: E0130 11:42:05.477653 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167\": container with ID starting with 8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167 not found: ID does not exist" containerID="8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.477694 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167"} err="failed to get container status \"8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167\": rpc error: code = NotFound desc = could not find container \"8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167\": container with ID starting with 8a14d4c365b14cd44bcc03f8f830bffa99d3206e7ca5f8ca94f3e332e5547167 not found: ID does not exist" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.708684 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.747463 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghlk5\" (UniqueName: \"kubernetes.io/projected/86bfe561-d583-4254-a4d5-89207efbeaf3-kube-api-access-ghlk5\") pod \"86bfe561-d583-4254-a4d5-89207efbeaf3\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.747541 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-utilities\") pod \"86bfe561-d583-4254-a4d5-89207efbeaf3\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.747577 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-catalog-content\") pod \"86bfe561-d583-4254-a4d5-89207efbeaf3\" (UID: \"86bfe561-d583-4254-a4d5-89207efbeaf3\") " Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.749042 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-utilities" (OuterVolumeSpecName: "utilities") pod "86bfe561-d583-4254-a4d5-89207efbeaf3" (UID: "86bfe561-d583-4254-a4d5-89207efbeaf3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.752006 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86bfe561-d583-4254-a4d5-89207efbeaf3-kube-api-access-ghlk5" (OuterVolumeSpecName: "kube-api-access-ghlk5") pod "86bfe561-d583-4254-a4d5-89207efbeaf3" (UID: "86bfe561-d583-4254-a4d5-89207efbeaf3"). InnerVolumeSpecName "kube-api-access-ghlk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.773671 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86bfe561-d583-4254-a4d5-89207efbeaf3" (UID: "86bfe561-d583-4254-a4d5-89207efbeaf3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.849510 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghlk5\" (UniqueName: \"kubernetes.io/projected/86bfe561-d583-4254-a4d5-89207efbeaf3-kube-api-access-ghlk5\") on node \"crc\" DevicePath \"\"" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.849860 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:42:05 crc kubenswrapper[4869]: I0130 11:42:05.849922 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86bfe561-d583-4254-a4d5-89207efbeaf3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.144414 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" path="/var/lib/kubelet/pods/2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5/volumes" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.357309 4869 generic.go:334] "Generic (PLEG): container finished" podID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerID="167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4" exitCode=0 Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.357355 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hlkq2" event={"ID":"86bfe561-d583-4254-a4d5-89207efbeaf3","Type":"ContainerDied","Data":"167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4"} Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.357382 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hlkq2" event={"ID":"86bfe561-d583-4254-a4d5-89207efbeaf3","Type":"ContainerDied","Data":"5566faa94fd93fa3e988e92a269594dc8053cf0c6cc662d419e630469105cb82"} Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.357405 4869 scope.go:117] "RemoveContainer" containerID="167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.357560 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hlkq2" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.387077 4869 scope.go:117] "RemoveContainer" containerID="5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.399143 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hlkq2"] Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.408023 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hlkq2"] Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.410329 4869 scope.go:117] "RemoveContainer" containerID="5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.429632 4869 scope.go:117] "RemoveContainer" containerID="167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4" Jan 30 11:42:06 crc kubenswrapper[4869]: E0130 11:42:06.430156 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4\": container with ID starting with 167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4 not found: ID does not exist" containerID="167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.430281 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4"} err="failed to get container status \"167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4\": rpc error: code = NotFound desc = could not find container \"167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4\": container with ID starting with 167dfebeb37febba4fb0d1b81aac116add41f745cbc50c91b5de2c42b86c43f4 not found: ID does not exist" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.430349 4869 scope.go:117] "RemoveContainer" containerID="5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc" Jan 30 11:42:06 crc kubenswrapper[4869]: E0130 11:42:06.430873 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc\": container with ID starting with 5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc not found: ID does not exist" containerID="5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.430912 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc"} err="failed to get container status \"5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc\": rpc error: code = NotFound desc = could not find container \"5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc\": container with ID starting with 5e8ee9eaab27e4d824e5fc4707d6a9ae321334d1ac3eb1d161d3f1edd73bb3cc not found: ID does not exist" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.430935 4869 scope.go:117] "RemoveContainer" containerID="5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8" Jan 30 11:42:06 crc kubenswrapper[4869]: E0130 11:42:06.431181 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8\": container with ID starting with 5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8 not found: ID does not exist" containerID="5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8" Jan 30 11:42:06 crc kubenswrapper[4869]: I0130 11:42:06.431209 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8"} err="failed to get container status \"5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8\": rpc error: code = NotFound desc = could not find container \"5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8\": container with ID starting with 5fddd1de9585f1d67aa40780ea947d2ee91bfda12467c4f38b15ed84ca00cec8 not found: ID does not exist" Jan 30 11:42:08 crc kubenswrapper[4869]: I0130 11:42:08.142213 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" path="/var/lib/kubelet/pods/86bfe561-d583-4254-a4d5-89207efbeaf3/volumes" Jan 30 11:42:21 crc kubenswrapper[4869]: I0130 11:42:21.770984 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:42:21 crc kubenswrapper[4869]: I0130 11:42:21.771494 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:42:51 crc kubenswrapper[4869]: I0130 11:42:51.769749 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:42:51 crc kubenswrapper[4869]: I0130 11:42:51.770299 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:42:51 crc kubenswrapper[4869]: I0130 11:42:51.770353 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:42:51 crc kubenswrapper[4869]: I0130 11:42:51.771025 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11e487505cedf864c909a4aa93c760d70f9e55412d7ffbfe015dd2f6c2562247"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:42:51 crc kubenswrapper[4869]: I0130 11:42:51.771081 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" 
podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://11e487505cedf864c909a4aa93c760d70f9e55412d7ffbfe015dd2f6c2562247" gracePeriod=600 Jan 30 11:42:52 crc kubenswrapper[4869]: I0130 11:42:52.682578 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="11e487505cedf864c909a4aa93c760d70f9e55412d7ffbfe015dd2f6c2562247" exitCode=0 Jan 30 11:42:52 crc kubenswrapper[4869]: I0130 11:42:52.682701 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"11e487505cedf864c909a4aa93c760d70f9e55412d7ffbfe015dd2f6c2562247"} Jan 30 11:42:52 crc kubenswrapper[4869]: I0130 11:42:52.683215 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8"} Jan 30 11:42:52 crc kubenswrapper[4869]: I0130 11:42:52.683233 4869 scope.go:117] "RemoveContainer" containerID="b10ca51d9d64534fe9be70a406e402962641002f564522876a5a04fdbd68daee" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.360310 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-29g6s"] Jan 30 11:44:31 crc kubenswrapper[4869]: E0130 11:44:31.361264 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="extract-content" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361281 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="extract-content" Jan 30 11:44:31 crc kubenswrapper[4869]: E0130 11:44:31.361302 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="extract-utilities" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361309 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="extract-utilities" Jan 30 11:44:31 crc kubenswrapper[4869]: E0130 11:44:31.361321 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="registry-server" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361329 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="registry-server" Jan 30 11:44:31 crc kubenswrapper[4869]: E0130 11:44:31.361345 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="extract-utilities" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361353 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="extract-utilities" Jan 30 11:44:31 crc kubenswrapper[4869]: E0130 11:44:31.361366 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="registry-server" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361373 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="registry-server" Jan 30 11:44:31 crc kubenswrapper[4869]: E0130 11:44:31.361391 4869 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="extract-content" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361398 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="extract-content" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361542 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a0eb5fd-1db7-4f58-92be-d8544f4fd1f5" containerName="registry-server" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.361568 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="86bfe561-d583-4254-a4d5-89207efbeaf3" containerName="registry-server" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.363307 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.384645 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-29g6s"] Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.510102 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p62cf\" (UniqueName: \"kubernetes.io/projected/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-kube-api-access-p62cf\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.510177 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-catalog-content\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.510199 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-utilities\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.611034 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p62cf\" (UniqueName: \"kubernetes.io/projected/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-kube-api-access-p62cf\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.611134 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-catalog-content\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.611163 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-utilities\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.611651 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-catalog-content\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.611698 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-utilities\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.631122 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p62cf\" (UniqueName: \"kubernetes.io/projected/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-kube-api-access-p62cf\") pod \"redhat-operators-29g6s\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") " pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:31 crc kubenswrapper[4869]: I0130 11:44:31.697751 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29g6s" Jan 30 11:44:32 crc kubenswrapper[4869]: I0130 11:44:32.181792 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-29g6s"] Jan 30 11:44:32 crc kubenswrapper[4869]: I0130 11:44:32.362464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerStarted","Data":"bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a"} Jan 30 11:44:32 crc kubenswrapper[4869]: I0130 11:44:32.362741 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerStarted","Data":"8546cf6684ac1930ace41eb6f49d7cd669dc3e2d71be47a18ad04f8a48a44eab"} Jan 30 11:44:33 crc kubenswrapper[4869]: I0130 11:44:33.369817 4869 generic.go:334] "Generic (PLEG): container finished" podID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerID="bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a" exitCode=0 Jan 30 11:44:33 crc kubenswrapper[4869]: I0130 11:44:33.369864 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerDied","Data":"bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a"} Jan 30 11:44:34 crc kubenswrapper[4869]: I0130 11:44:34.377424 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerStarted","Data":"d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4"} Jan 30 11:44:35 crc kubenswrapper[4869]: I0130 11:44:35.386062 4869 generic.go:334] "Generic (PLEG): container finished" podID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerID="d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4" exitCode=0 Jan 30 11:44:35 crc kubenswrapper[4869]: I0130 11:44:35.386110 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerDied","Data":"d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4"} 
Jan 30 11:44:37 crc kubenswrapper[4869]: I0130 11:44:37.405477 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerStarted","Data":"d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e"}
Jan 30 11:44:37 crc kubenswrapper[4869]: I0130 11:44:37.424046 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-29g6s" podStartSLOduration=2.953152863 podStartE2EDuration="6.42402614s" podCreationTimestamp="2026-01-30 11:44:31 +0000 UTC" firstStartedPulling="2026-01-30 11:44:33.37146123 +0000 UTC m=+3023.921337296" lastFinishedPulling="2026-01-30 11:44:36.842334507 +0000 UTC m=+3027.392210573" observedRunningTime="2026-01-30 11:44:37.422952939 +0000 UTC m=+3027.972829015" watchObservedRunningTime="2026-01-30 11:44:37.42402614 +0000 UTC m=+3027.973902206"
Jan 30 11:44:41 crc kubenswrapper[4869]: I0130 11:44:41.698069 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-29g6s"
Jan 30 11:44:41 crc kubenswrapper[4869]: I0130 11:44:41.698413 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-29g6s"
Jan 30 11:44:42 crc kubenswrapper[4869]: I0130 11:44:42.741188 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-29g6s" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="registry-server" probeResult="failure" output=<
Jan 30 11:44:42 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s
Jan 30 11:44:42 crc kubenswrapper[4869]: >
Jan 30 11:44:51 crc kubenswrapper[4869]: I0130 11:44:51.736237 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-29g6s"
Jan 30 11:44:51 crc kubenswrapper[4869]: I0130 11:44:51.782608 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-29g6s"
Jan 30 11:44:51 crc kubenswrapper[4869]: I0130 11:44:51.967579 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-29g6s"]
Jan 30 11:44:53 crc kubenswrapper[4869]: I0130 11:44:53.513401 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-29g6s" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="registry-server" containerID="cri-o://d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e" gracePeriod=2
Jan 30 11:44:53 crc kubenswrapper[4869]: I0130 11:44:53.878003 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29g6s"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.035216 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-catalog-content\") pod \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") "
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.035291 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p62cf\" (UniqueName: \"kubernetes.io/projected/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-kube-api-access-p62cf\") pod \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") "
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.035367 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-utilities\") pod \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\" (UID: \"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba\") "
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.038431 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-utilities" (OuterVolumeSpecName: "utilities") pod "fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" (UID: "fbbdca79-2a48-4b68-8228-ee4dd4ab89ba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.055941 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-kube-api-access-p62cf" (OuterVolumeSpecName: "kube-api-access-p62cf") pod "fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" (UID: "fbbdca79-2a48-4b68-8228-ee4dd4ab89ba"). InnerVolumeSpecName "kube-api-access-p62cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.137546 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p62cf\" (UniqueName: \"kubernetes.io/projected/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-kube-api-access-p62cf\") on node \"crc\" DevicePath \"\""
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.137577 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.184368 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" (UID: "fbbdca79-2a48-4b68-8228-ee4dd4ab89ba"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.238764 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.521849 4869 generic.go:334] "Generic (PLEG): container finished" podID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerID="d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e" exitCode=0
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.521892 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerDied","Data":"d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e"}
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.521922 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29g6s" event={"ID":"fbbdca79-2a48-4b68-8228-ee4dd4ab89ba","Type":"ContainerDied","Data":"8546cf6684ac1930ace41eb6f49d7cd669dc3e2d71be47a18ad04f8a48a44eab"}
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.521932 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29g6s"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.521940 4869 scope.go:117] "RemoveContainer" containerID="d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.541736 4869 scope.go:117] "RemoveContainer" containerID="d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.550910 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-29g6s"]
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.556654 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-29g6s"]
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.578205 4869 scope.go:117] "RemoveContainer" containerID="bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.593744 4869 scope.go:117] "RemoveContainer" containerID="d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e"
Jan 30 11:44:54 crc kubenswrapper[4869]: E0130 11:44:54.594165 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e\": container with ID starting with d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e not found: ID does not exist" containerID="d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.594219 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e"} err="failed to get container status \"d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e\": rpc error: code = NotFound desc = could not find container \"d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e\": container with ID starting with d98f40c1e8366b95a2e34a261358ac94aa91c0faa6c626036e7e66c2c7570b2e not found: ID does not exist"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.594247 4869 scope.go:117] "RemoveContainer" containerID="d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4"
Jan 30 11:44:54 crc kubenswrapper[4869]: E0130 11:44:54.594571 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4\": container with ID starting with d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4 not found: ID does not exist" containerID="d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.594615 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4"} err="failed to get container status \"d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4\": rpc error: code = NotFound desc = could not find container \"d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4\": container with ID starting with d0f23b1885e614b81eca4a8895c379e5dfd9315e78ad6bd4363cecce104f3ab4 not found: ID does not exist"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.594642 4869 scope.go:117] "RemoveContainer" containerID="bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a"
Jan 30 11:44:54 crc kubenswrapper[4869]: E0130 11:44:54.595116 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a\": container with ID starting with bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a not found: ID does not exist" containerID="bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a"
Jan 30 11:44:54 crc kubenswrapper[4869]: I0130 11:44:54.595139 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a"} err="failed to get container status \"bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a\": rpc error: code = NotFound desc = could not find container \"bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a\": container with ID starting with bfa67dd82b0c3161e59db1652a6b6ed9222f9b3af881fb58aa4a7871427f2f5a not found: ID does not exist"
Jan 30 11:44:56 crc kubenswrapper[4869]: I0130 11:44:56.140944 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" path="/var/lib/kubelet/pods/fbbdca79-2a48-4b68-8228-ee4dd4ab89ba/volumes"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.162067 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"]
Jan 30 11:45:00 crc kubenswrapper[4869]: E0130 11:45:00.163290 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="registry-server"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.163321 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="registry-server"
Jan 30 11:45:00 crc kubenswrapper[4869]: E0130 11:45:00.163363 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="extract-utilities"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.163811 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="extract-utilities"
Jan 30 11:45:00 crc kubenswrapper[4869]: E0130 11:45:00.163842 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="extract-content"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.163854 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="extract-content"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.164123 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbbdca79-2a48-4b68-8228-ee4dd4ab89ba" containerName="registry-server"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.165172 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.168790 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"]
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.193515 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.193556 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.330548 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e72b1dc7-d58a-4748-9c13-3da642bea419-secret-volume\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.330801 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqt9k\" (UniqueName: \"kubernetes.io/projected/e72b1dc7-d58a-4748-9c13-3da642bea419-kube-api-access-cqt9k\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.330939 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e72b1dc7-d58a-4748-9c13-3da642bea419-config-volume\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.432484 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqt9k\" (UniqueName: \"kubernetes.io/projected/e72b1dc7-d58a-4748-9c13-3da642bea419-kube-api-access-cqt9k\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.433149 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e72b1dc7-d58a-4748-9c13-3da642bea419-config-volume\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.433305 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e72b1dc7-d58a-4748-9c13-3da642bea419-secret-volume\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.434103 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e72b1dc7-d58a-4748-9c13-3da642bea419-config-volume\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.438690 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e72b1dc7-d58a-4748-9c13-3da642bea419-secret-volume\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.448788 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqt9k\" (UniqueName: \"kubernetes.io/projected/e72b1dc7-d58a-4748-9c13-3da642bea419-kube-api-access-cqt9k\") pod \"collect-profiles-29496225-nk2r8\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.518281 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:00 crc kubenswrapper[4869]: I0130 11:45:00.979907 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"]
Jan 30 11:45:01 crc kubenswrapper[4869]: I0130 11:45:01.586569 4869 generic.go:334] "Generic (PLEG): container finished" podID="e72b1dc7-d58a-4748-9c13-3da642bea419" containerID="9456bbbd83da80b71808ea93de6b983c2281788e18c1550dc08d5d1c7088d60c" exitCode=0
Jan 30 11:45:01 crc kubenswrapper[4869]: I0130 11:45:01.586692 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8" event={"ID":"e72b1dc7-d58a-4748-9c13-3da642bea419","Type":"ContainerDied","Data":"9456bbbd83da80b71808ea93de6b983c2281788e18c1550dc08d5d1c7088d60c"}
Jan 30 11:45:01 crc kubenswrapper[4869]: I0130 11:45:01.586903 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8" event={"ID":"e72b1dc7-d58a-4748-9c13-3da642bea419","Type":"ContainerStarted","Data":"5c23002160f418e1b3f5efd21b3b2d655c43691f336b994ab1bd5a36157f7db0"}
Jan 30 11:45:02 crc kubenswrapper[4869]: I0130 11:45:02.834278 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:02 crc kubenswrapper[4869]: I0130 11:45:02.968540 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e72b1dc7-d58a-4748-9c13-3da642bea419-secret-volume\") pod \"e72b1dc7-d58a-4748-9c13-3da642bea419\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") "
Jan 30 11:45:02 crc kubenswrapper[4869]: I0130 11:45:02.968679 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e72b1dc7-d58a-4748-9c13-3da642bea419-config-volume\") pod \"e72b1dc7-d58a-4748-9c13-3da642bea419\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") "
Jan 30 11:45:02 crc kubenswrapper[4869]: I0130 11:45:02.968762 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqt9k\" (UniqueName: \"kubernetes.io/projected/e72b1dc7-d58a-4748-9c13-3da642bea419-kube-api-access-cqt9k\") pod \"e72b1dc7-d58a-4748-9c13-3da642bea419\" (UID: \"e72b1dc7-d58a-4748-9c13-3da642bea419\") "
Jan 30 11:45:02 crc kubenswrapper[4869]: I0130 11:45:02.969643 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e72b1dc7-d58a-4748-9c13-3da642bea419-config-volume" (OuterVolumeSpecName: "config-volume") pod "e72b1dc7-d58a-4748-9c13-3da642bea419" (UID: "e72b1dc7-d58a-4748-9c13-3da642bea419"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 11:45:02 crc kubenswrapper[4869]: I0130 11:45:02.974585 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e72b1dc7-d58a-4748-9c13-3da642bea419-kube-api-access-cqt9k" (OuterVolumeSpecName: "kube-api-access-cqt9k") pod "e72b1dc7-d58a-4748-9c13-3da642bea419" (UID: "e72b1dc7-d58a-4748-9c13-3da642bea419"). InnerVolumeSpecName "kube-api-access-cqt9k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 11:45:02 crc kubenswrapper[4869]: I0130 11:45:02.974607 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72b1dc7-d58a-4748-9c13-3da642bea419-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e72b1dc7-d58a-4748-9c13-3da642bea419" (UID: "e72b1dc7-d58a-4748-9c13-3da642bea419"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.070037 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqt9k\" (UniqueName: \"kubernetes.io/projected/e72b1dc7-d58a-4748-9c13-3da642bea419-kube-api-access-cqt9k\") on node \"crc\" DevicePath \"\""
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.070083 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e72b1dc7-d58a-4748-9c13-3da642bea419-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.070094 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e72b1dc7-d58a-4748-9c13-3da642bea419-config-volume\") on node \"crc\" DevicePath \"\""
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.605445 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8" event={"ID":"e72b1dc7-d58a-4748-9c13-3da642bea419","Type":"ContainerDied","Data":"5c23002160f418e1b3f5efd21b3b2d655c43691f336b994ab1bd5a36157f7db0"}
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.605495 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c23002160f418e1b3f5efd21b3b2d655c43691f336b994ab1bd5a36157f7db0"
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.605506 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496225-nk2r8"
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.903070 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf"]
Jan 30 11:45:03 crc kubenswrapper[4869]: I0130 11:45:03.908099 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496180-bjlvf"]
Jan 30 11:45:04 crc kubenswrapper[4869]: I0130 11:45:04.142455 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b980241e-5870-4e34-af67-379e2470fb36" path="/var/lib/kubelet/pods/b980241e-5870-4e34-af67-379e2470fb36/volumes"
Jan 30 11:45:15 crc kubenswrapper[4869]: I0130 11:45:15.134592 4869 scope.go:117] "RemoveContainer" containerID="cc060fb634eddcda6fb706c2a7dcdb30c253af51f7878c50bdbf547b343700e3"
Jan 30 11:45:21 crc kubenswrapper[4869]: I0130 11:45:21.769193 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:45:21 crc kubenswrapper[4869]: I0130 11:45:21.769700 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:45:51 crc kubenswrapper[4869]: I0130 11:45:51.769666 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:45:51 crc kubenswrapper[4869]: I0130 11:45:51.770221 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:46:21 crc kubenswrapper[4869]: I0130 11:46:21.769184 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 11:46:21 crc kubenswrapper[4869]: I0130 11:46:21.769789 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 11:46:21 crc kubenswrapper[4869]: I0130 11:46:21.769846 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2"
Jan 30 11:46:21 crc kubenswrapper[4869]: I0130 11:46:21.770450 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 11:46:21 crc kubenswrapper[4869]: I0130 11:46:21.770492 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" gracePeriod=600
Jan 30 11:46:21 crc kubenswrapper[4869]: E0130 11:46:21.892546 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 11:46:22 crc kubenswrapper[4869]: I0130 11:46:22.127662 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" exitCode=0
Jan 30 11:46:22 crc kubenswrapper[4869]: I0130 11:46:22.127731 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8"}
Jan 30 11:46:22 crc kubenswrapper[4869]: I0130 11:46:22.127771 4869 scope.go:117] "RemoveContainer" containerID="11e487505cedf864c909a4aa93c760d70f9e55412d7ffbfe015dd2f6c2562247"
Jan 30 11:46:22 crc kubenswrapper[4869]: I0130 11:46:22.128352 4869 scope.go:117] "RemoveContainer"
containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:46:22 crc kubenswrapper[4869]: E0130 11:46:22.128651 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:46:36 crc kubenswrapper[4869]: I0130 11:46:36.133917 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:46:36 crc kubenswrapper[4869]: E0130 11:46:36.134487 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:46:50 crc kubenswrapper[4869]: I0130 11:46:50.140573 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:46:50 crc kubenswrapper[4869]: E0130 11:46:50.141418 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:47:01 crc kubenswrapper[4869]: I0130 11:47:01.133540 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:47:01 crc kubenswrapper[4869]: E0130 11:47:01.134326 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:47:16 crc kubenswrapper[4869]: I0130 11:47:16.133268 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:47:16 crc kubenswrapper[4869]: E0130 11:47:16.134331 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:47:27 crc kubenswrapper[4869]: I0130 11:47:27.133006 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:47:27 crc kubenswrapper[4869]: E0130 11:47:27.133796 4869 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:47:40 crc kubenswrapper[4869]: I0130 11:47:40.137671 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:47:40 crc kubenswrapper[4869]: E0130 11:47:40.138499 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:47:53 crc kubenswrapper[4869]: I0130 11:47:53.132638 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:47:53 crc kubenswrapper[4869]: E0130 11:47:53.133417 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:48:07 crc kubenswrapper[4869]: I0130 11:48:07.133410 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:48:07 crc kubenswrapper[4869]: E0130 11:48:07.133899 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:48:20 crc kubenswrapper[4869]: I0130 11:48:20.138460 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:48:20 crc kubenswrapper[4869]: E0130 11:48:20.139090 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:48:33 crc kubenswrapper[4869]: I0130 11:48:33.132740 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:48:33 crc kubenswrapper[4869]: E0130 11:48:33.133376 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:48:48 crc kubenswrapper[4869]: I0130 11:48:48.144983 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:48:48 crc kubenswrapper[4869]: E0130 11:48:48.146006 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:48:59 crc kubenswrapper[4869]: I0130 11:48:59.132813 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:48:59 crc kubenswrapper[4869]: E0130 11:48:59.133475 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.288481 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9lqtj"] Jan 30 11:49:06 crc kubenswrapper[4869]: E0130 11:49:06.289618 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e72b1dc7-d58a-4748-9c13-3da642bea419" containerName="collect-profiles" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.289633 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e72b1dc7-d58a-4748-9c13-3da642bea419" containerName="collect-profiles" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.289850 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e72b1dc7-d58a-4748-9c13-3da642bea419" containerName="collect-profiles" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.291770 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.325063 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9lqtj"] Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.368187 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-catalog-content\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.368284 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-utilities\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.368330 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmwwt\" (UniqueName: \"kubernetes.io/projected/1971f380-0365-45db-a3ca-40a2cd2ae484-kube-api-access-mmwwt\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.470025 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-utilities\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.470075 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmwwt\" (UniqueName: \"kubernetes.io/projected/1971f380-0365-45db-a3ca-40a2cd2ae484-kube-api-access-mmwwt\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.470145 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-catalog-content\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.470613 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-utilities\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.470629 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-catalog-content\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.492827 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mmwwt\" (UniqueName: \"kubernetes.io/projected/1971f380-0365-45db-a3ca-40a2cd2ae484-kube-api-access-mmwwt\") pod \"community-operators-9lqtj\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:06 crc kubenswrapper[4869]: I0130 11:49:06.629621 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:07 crc kubenswrapper[4869]: I0130 11:49:07.139660 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9lqtj"] Jan 30 11:49:07 crc kubenswrapper[4869]: I0130 11:49:07.309729 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lqtj" event={"ID":"1971f380-0365-45db-a3ca-40a2cd2ae484","Type":"ContainerStarted","Data":"400ea7f1bf4c74400f0eec3cd9385dd2c40fbdaa07778474cce5840c6247bdc6"} Jan 30 11:49:08 crc kubenswrapper[4869]: I0130 11:49:08.319539 4869 generic.go:334] "Generic (PLEG): container finished" podID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerID="45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1" exitCode=0 Jan 30 11:49:08 crc kubenswrapper[4869]: I0130 11:49:08.319589 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lqtj" event={"ID":"1971f380-0365-45db-a3ca-40a2cd2ae484","Type":"ContainerDied","Data":"45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1"} Jan 30 11:49:08 crc kubenswrapper[4869]: I0130 11:49:08.321954 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 11:49:09 crc kubenswrapper[4869]: I0130 11:49:09.329806 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lqtj" event={"ID":"1971f380-0365-45db-a3ca-40a2cd2ae484","Type":"ContainerStarted","Data":"5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1"} Jan 30 11:49:10 crc kubenswrapper[4869]: I0130 11:49:10.337554 4869 generic.go:334] "Generic (PLEG): container finished" podID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerID="5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1" exitCode=0 Jan 30 11:49:10 crc kubenswrapper[4869]: I0130 11:49:10.337628 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lqtj" event={"ID":"1971f380-0365-45db-a3ca-40a2cd2ae484","Type":"ContainerDied","Data":"5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1"} Jan 30 11:49:11 crc kubenswrapper[4869]: I0130 11:49:11.133097 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:49:11 crc kubenswrapper[4869]: E0130 11:49:11.133579 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:49:12 crc kubenswrapper[4869]: I0130 11:49:12.354686 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lqtj" 
event={"ID":"1971f380-0365-45db-a3ca-40a2cd2ae484","Type":"ContainerStarted","Data":"603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93"} Jan 30 11:49:12 crc kubenswrapper[4869]: I0130 11:49:12.384061 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9lqtj" podStartSLOduration=3.551946195 podStartE2EDuration="6.384028241s" podCreationTimestamp="2026-01-30 11:49:06 +0000 UTC" firstStartedPulling="2026-01-30 11:49:08.321616308 +0000 UTC m=+3298.871492374" lastFinishedPulling="2026-01-30 11:49:11.153698354 +0000 UTC m=+3301.703574420" observedRunningTime="2026-01-30 11:49:12.378279449 +0000 UTC m=+3302.928155515" watchObservedRunningTime="2026-01-30 11:49:12.384028241 +0000 UTC m=+3302.933904307" Jan 30 11:49:16 crc kubenswrapper[4869]: I0130 11:49:16.630067 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:16 crc kubenswrapper[4869]: I0130 11:49:16.630386 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:16 crc kubenswrapper[4869]: I0130 11:49:16.666327 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:17 crc kubenswrapper[4869]: I0130 11:49:17.428501 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:17 crc kubenswrapper[4869]: I0130 11:49:17.475893 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9lqtj"] Jan 30 11:49:19 crc kubenswrapper[4869]: I0130 11:49:19.399627 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9lqtj" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="registry-server" containerID="cri-o://603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93" gracePeriod=2 Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.289737 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.380532 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-catalog-content\") pod \"1971f380-0365-45db-a3ca-40a2cd2ae484\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.382880 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmwwt\" (UniqueName: \"kubernetes.io/projected/1971f380-0365-45db-a3ca-40a2cd2ae484-kube-api-access-mmwwt\") pod \"1971f380-0365-45db-a3ca-40a2cd2ae484\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.382976 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-utilities\") pod \"1971f380-0365-45db-a3ca-40a2cd2ae484\" (UID: \"1971f380-0365-45db-a3ca-40a2cd2ae484\") " Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.383867 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-utilities" (OuterVolumeSpecName: "utilities") pod "1971f380-0365-45db-a3ca-40a2cd2ae484" (UID: "1971f380-0365-45db-a3ca-40a2cd2ae484"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.394085 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1971f380-0365-45db-a3ca-40a2cd2ae484-kube-api-access-mmwwt" (OuterVolumeSpecName: "kube-api-access-mmwwt") pod "1971f380-0365-45db-a3ca-40a2cd2ae484" (UID: "1971f380-0365-45db-a3ca-40a2cd2ae484"). InnerVolumeSpecName "kube-api-access-mmwwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.419436 4869 generic.go:334] "Generic (PLEG): container finished" podID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerID="603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93" exitCode=0 Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.419493 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lqtj" event={"ID":"1971f380-0365-45db-a3ca-40a2cd2ae484","Type":"ContainerDied","Data":"603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93"} Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.419528 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lqtj" event={"ID":"1971f380-0365-45db-a3ca-40a2cd2ae484","Type":"ContainerDied","Data":"400ea7f1bf4c74400f0eec3cd9385dd2c40fbdaa07778474cce5840c6247bdc6"} Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.419551 4869 scope.go:117] "RemoveContainer" containerID="603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.419737 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9lqtj" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.435591 4869 scope.go:117] "RemoveContainer" containerID="5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.449695 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1971f380-0365-45db-a3ca-40a2cd2ae484" (UID: "1971f380-0365-45db-a3ca-40a2cd2ae484"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.450359 4869 scope.go:117] "RemoveContainer" containerID="45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.473449 4869 scope.go:117] "RemoveContainer" containerID="603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93" Jan 30 11:49:20 crc kubenswrapper[4869]: E0130 11:49:20.474013 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93\": container with ID starting with 603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93 not found: ID does not exist" containerID="603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.474077 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93"} err="failed to get container status \"603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93\": rpc error: code = NotFound desc = could not find container \"603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93\": container with ID starting with 603d86052ac4893459f98151337a581fc6afb2754c5ce0667632b8e858d07f93 not found: ID does not exist" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.474115 4869 scope.go:117] "RemoveContainer" containerID="5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1" Jan 30 11:49:20 crc kubenswrapper[4869]: E0130 11:49:20.474524 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1\": container with ID starting with 5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1 not found: ID does not exist" containerID="5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.474554 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1"} err="failed to get container status \"5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1\": rpc error: code = NotFound desc = could not find container \"5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1\": container with ID starting with 5801625224e0eef3ac8629b8829273fb25ddae1010eaf11e55afeedc117afcf1 not found: ID does not exist" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.474575 4869 scope.go:117] "RemoveContainer" containerID="45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1" Jan 30 11:49:20 crc 
kubenswrapper[4869]: E0130 11:49:20.474886 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1\": container with ID starting with 45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1 not found: ID does not exist" containerID="45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.474928 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1"} err="failed to get container status \"45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1\": rpc error: code = NotFound desc = could not find container \"45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1\": container with ID starting with 45bafd1a911abdca7fd714f2f67afe74daa136c8b54700d6dfc25ba041b3ace1 not found: ID does not exist" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.484816 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.484841 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmwwt\" (UniqueName: \"kubernetes.io/projected/1971f380-0365-45db-a3ca-40a2cd2ae484-kube-api-access-mmwwt\") on node \"crc\" DevicePath \"\"" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.484852 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1971f380-0365-45db-a3ca-40a2cd2ae484-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.755825 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9lqtj"] Jan 30 11:49:20 crc kubenswrapper[4869]: I0130 11:49:20.761473 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9lqtj"] Jan 30 11:49:22 crc kubenswrapper[4869]: I0130 11:49:22.151371 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" path="/var/lib/kubelet/pods/1971f380-0365-45db-a3ca-40a2cd2ae484/volumes" Jan 30 11:49:23 crc kubenswrapper[4869]: I0130 11:49:23.132538 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:49:23 crc kubenswrapper[4869]: E0130 11:49:23.132805 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:49:37 crc kubenswrapper[4869]: I0130 11:49:37.133332 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:49:37 crc kubenswrapper[4869]: E0130 11:49:37.134046 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:49:50 crc kubenswrapper[4869]: I0130 11:49:50.137398 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:49:50 crc kubenswrapper[4869]: E0130 11:49:50.138078 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:50:01 crc kubenswrapper[4869]: I0130 11:50:01.133612 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:50:01 crc kubenswrapper[4869]: E0130 11:50:01.134346 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:50:14 crc kubenswrapper[4869]: I0130 11:50:14.134220 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:50:14 crc kubenswrapper[4869]: E0130 11:50:14.134928 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:50:25 crc kubenswrapper[4869]: I0130 11:50:25.133939 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:50:25 crc kubenswrapper[4869]: E0130 11:50:25.135040 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:50:37 crc kubenswrapper[4869]: I0130 11:50:37.132516 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:50:37 crc kubenswrapper[4869]: E0130 11:50:37.133285 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:50:48 crc kubenswrapper[4869]: I0130 11:50:48.132789 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:50:48 crc kubenswrapper[4869]: E0130 11:50:48.133493 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:51:01 crc kubenswrapper[4869]: I0130 11:51:01.134526 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:51:01 crc kubenswrapper[4869]: E0130 11:51:01.135301 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:51:16 crc kubenswrapper[4869]: I0130 11:51:16.133411 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:51:16 crc kubenswrapper[4869]: E0130 11:51:16.134176 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:51:31 crc kubenswrapper[4869]: I0130 11:51:31.132805 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:51:32 crc kubenswrapper[4869]: I0130 11:51:32.294308 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"94f570b1b967af4f8ae6075c92ac563b5379a48c31b5c2c0c62bb8a7c2c22978"} Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.587488 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-76g95"] Jan 30 11:52:02 crc kubenswrapper[4869]: E0130 11:52:02.588305 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="extract-content" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.588319 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="extract-content" Jan 30 11:52:02 crc kubenswrapper[4869]: E0130 11:52:02.588333 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="extract-utilities" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.588339 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="extract-utilities" Jan 30 11:52:02 crc kubenswrapper[4869]: E0130 11:52:02.588365 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="registry-server" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.588372 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="registry-server" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.588480 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="1971f380-0365-45db-a3ca-40a2cd2ae484" containerName="registry-server" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.589448 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.604136 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-76g95"] Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.765243 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvqwk\" (UniqueName: \"kubernetes.io/projected/937ebccf-8a1c-460e-a97e-3d811f56ab6b-kube-api-access-gvqwk\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.765339 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-catalog-content\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.765367 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-utilities\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.866778 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-catalog-content\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.866914 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-utilities\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.867293 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-utilities\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.867285 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-catalog-content\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.867371 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvqwk\" (UniqueName: \"kubernetes.io/projected/937ebccf-8a1c-460e-a97e-3d811f56ab6b-kube-api-access-gvqwk\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.889400 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvqwk\" (UniqueName: \"kubernetes.io/projected/937ebccf-8a1c-460e-a97e-3d811f56ab6b-kube-api-access-gvqwk\") pod \"certified-operators-76g95\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:02 crc kubenswrapper[4869]: I0130 11:52:02.908264 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:03 crc kubenswrapper[4869]: I0130 11:52:03.400291 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-76g95"] Jan 30 11:52:03 crc kubenswrapper[4869]: I0130 11:52:03.485629 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76g95" event={"ID":"937ebccf-8a1c-460e-a97e-3d811f56ab6b","Type":"ContainerStarted","Data":"de69512ab71b8aff4681ffb657ea531d06754374460028ba90c525f965d0e554"} Jan 30 11:52:04 crc kubenswrapper[4869]: I0130 11:52:04.494341 4869 generic.go:334] "Generic (PLEG): container finished" podID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerID="d79db0f14c2e20f79f5223e45b991628b70b75bbb4a3043c23b68d6cc4286f40" exitCode=0 Jan 30 11:52:04 crc kubenswrapper[4869]: I0130 11:52:04.494469 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76g95" event={"ID":"937ebccf-8a1c-460e-a97e-3d811f56ab6b","Type":"ContainerDied","Data":"d79db0f14c2e20f79f5223e45b991628b70b75bbb4a3043c23b68d6cc4286f40"} Jan 30 11:52:05 crc kubenswrapper[4869]: I0130 11:52:05.504623 4869 generic.go:334] "Generic (PLEG): container finished" podID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerID="f67014bbda714be3d1caf9ea5c807566725b1f75d1c36f1f57e8d8c943362f7f" exitCode=0 Jan 30 11:52:05 crc kubenswrapper[4869]: I0130 11:52:05.504930 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76g95" event={"ID":"937ebccf-8a1c-460e-a97e-3d811f56ab6b","Type":"ContainerDied","Data":"f67014bbda714be3d1caf9ea5c807566725b1f75d1c36f1f57e8d8c943362f7f"} Jan 30 11:52:06 crc kubenswrapper[4869]: I0130 11:52:06.518412 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76g95" event={"ID":"937ebccf-8a1c-460e-a97e-3d811f56ab6b","Type":"ContainerStarted","Data":"cffc677019baf9fb7831155ea1aab3c2487b4634f336efbf76a7f1d5aac06305"} Jan 30 11:52:06 crc kubenswrapper[4869]: I0130 11:52:06.539552 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-76g95" podStartSLOduration=3.134243504 podStartE2EDuration="4.539536264s" 
podCreationTimestamp="2026-01-30 11:52:02 +0000 UTC" firstStartedPulling="2026-01-30 11:52:04.496513974 +0000 UTC m=+3475.046390040" lastFinishedPulling="2026-01-30 11:52:05.901806734 +0000 UTC m=+3476.451682800" observedRunningTime="2026-01-30 11:52:06.536808717 +0000 UTC m=+3477.086684783" watchObservedRunningTime="2026-01-30 11:52:06.539536264 +0000 UTC m=+3477.089412330" Jan 30 11:52:12 crc kubenswrapper[4869]: I0130 11:52:12.909361 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:12 crc kubenswrapper[4869]: I0130 11:52:12.909970 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:12 crc kubenswrapper[4869]: I0130 11:52:12.950780 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:13 crc kubenswrapper[4869]: I0130 11:52:13.604435 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:13 crc kubenswrapper[4869]: I0130 11:52:13.646211 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-76g95"] Jan 30 11:52:15 crc kubenswrapper[4869]: I0130 11:52:15.578988 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-76g95" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="registry-server" containerID="cri-o://cffc677019baf9fb7831155ea1aab3c2487b4634f336efbf76a7f1d5aac06305" gracePeriod=2 Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.586355 4869 generic.go:334] "Generic (PLEG): container finished" podID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerID="cffc677019baf9fb7831155ea1aab3c2487b4634f336efbf76a7f1d5aac06305" exitCode=0 Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.586677 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76g95" event={"ID":"937ebccf-8a1c-460e-a97e-3d811f56ab6b","Type":"ContainerDied","Data":"cffc677019baf9fb7831155ea1aab3c2487b4634f336efbf76a7f1d5aac06305"} Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.586722 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76g95" event={"ID":"937ebccf-8a1c-460e-a97e-3d811f56ab6b","Type":"ContainerDied","Data":"de69512ab71b8aff4681ffb657ea531d06754374460028ba90c525f965d0e554"} Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.586740 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de69512ab71b8aff4681ffb657ea531d06754374460028ba90c525f965d0e554" Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.597582 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.684248 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-catalog-content\") pod \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.684298 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvqwk\" (UniqueName: \"kubernetes.io/projected/937ebccf-8a1c-460e-a97e-3d811f56ab6b-kube-api-access-gvqwk\") pod \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.684417 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-utilities\") pod \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\" (UID: \"937ebccf-8a1c-460e-a97e-3d811f56ab6b\") " Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.685451 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-utilities" (OuterVolumeSpecName: "utilities") pod "937ebccf-8a1c-460e-a97e-3d811f56ab6b" (UID: "937ebccf-8a1c-460e-a97e-3d811f56ab6b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.689802 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/937ebccf-8a1c-460e-a97e-3d811f56ab6b-kube-api-access-gvqwk" (OuterVolumeSpecName: "kube-api-access-gvqwk") pod "937ebccf-8a1c-460e-a97e-3d811f56ab6b" (UID: "937ebccf-8a1c-460e-a97e-3d811f56ab6b"). InnerVolumeSpecName "kube-api-access-gvqwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.732919 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "937ebccf-8a1c-460e-a97e-3d811f56ab6b" (UID: "937ebccf-8a1c-460e-a97e-3d811f56ab6b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.785632 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.785664 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvqwk\" (UniqueName: \"kubernetes.io/projected/937ebccf-8a1c-460e-a97e-3d811f56ab6b-kube-api-access-gvqwk\") on node \"crc\" DevicePath \"\"" Jan 30 11:52:16 crc kubenswrapper[4869]: I0130 11:52:16.785676 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/937ebccf-8a1c-460e-a97e-3d811f56ab6b-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:52:17 crc kubenswrapper[4869]: I0130 11:52:17.591847 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-76g95" Jan 30 11:52:17 crc kubenswrapper[4869]: I0130 11:52:17.641366 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-76g95"] Jan 30 11:52:17 crc kubenswrapper[4869]: I0130 11:52:17.645154 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-76g95"] Jan 30 11:52:17 crc kubenswrapper[4869]: E0130 11:52:17.668884 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod937ebccf_8a1c_460e_a97e_3d811f56ab6b.slice/crio-de69512ab71b8aff4681ffb657ea531d06754374460028ba90c525f965d0e554\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod937ebccf_8a1c_460e_a97e_3d811f56ab6b.slice\": RecentStats: unable to find data in memory cache]" Jan 30 11:52:18 crc kubenswrapper[4869]: I0130 11:52:18.140973 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" path="/var/lib/kubelet/pods/937ebccf-8a1c-460e-a97e-3d811f56ab6b/volumes" Jan 30 11:53:51 crc kubenswrapper[4869]: I0130 11:53:51.769885 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:53:51 crc kubenswrapper[4869]: I0130 11:53:51.771358 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:54:21 crc kubenswrapper[4869]: I0130 11:54:21.769833 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:54:21 crc kubenswrapper[4869]: I0130 11:54:21.770406 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:54:51 crc kubenswrapper[4869]: I0130 11:54:51.769674 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:54:51 crc kubenswrapper[4869]: I0130 11:54:51.770221 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:54:51 crc kubenswrapper[4869]: I0130 11:54:51.770275 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:54:51 crc kubenswrapper[4869]: I0130 11:54:51.771129 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"94f570b1b967af4f8ae6075c92ac563b5379a48c31b5c2c0c62bb8a7c2c22978"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:54:51 crc kubenswrapper[4869]: I0130 11:54:51.771185 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://94f570b1b967af4f8ae6075c92ac563b5379a48c31b5c2c0c62bb8a7c2c22978" gracePeriod=600 Jan 30 11:54:52 crc kubenswrapper[4869]: I0130 11:54:52.583018 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="94f570b1b967af4f8ae6075c92ac563b5379a48c31b5c2c0c62bb8a7c2c22978" exitCode=0 Jan 30 11:54:52 crc kubenswrapper[4869]: I0130 11:54:52.583112 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"94f570b1b967af4f8ae6075c92ac563b5379a48c31b5c2c0c62bb8a7c2c22978"} Jan 30 11:54:52 crc kubenswrapper[4869]: I0130 11:54:52.583411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"} Jan 30 11:54:52 crc kubenswrapper[4869]: I0130 11:54:52.583440 4869 scope.go:117] "RemoveContainer" containerID="64daf258e75ff3b31a829083ca8950f9991e441a11bc45d64c0e73239445efe8" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.193560 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-72xfv"] Jan 30 11:55:27 crc kubenswrapper[4869]: E0130 11:55:27.194402 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="extract-utilities" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.194417 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="extract-utilities" Jan 30 11:55:27 crc kubenswrapper[4869]: E0130 11:55:27.194432 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="extract-content" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.194441 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="extract-content" Jan 30 11:55:27 crc kubenswrapper[4869]: E0130 11:55:27.194476 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="registry-server" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.194485 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="registry-server" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.194658 4869 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="937ebccf-8a1c-460e-a97e-3d811f56ab6b" containerName="registry-server" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.195661 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.208857 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72xfv"] Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.300243 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grml9\" (UniqueName: \"kubernetes.io/projected/52f6563d-9a78-4b18-b8a8-5505d2874a84-kube-api-access-grml9\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.300293 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52f6563d-9a78-4b18-b8a8-5505d2874a84-utilities\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.300399 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52f6563d-9a78-4b18-b8a8-5505d2874a84-catalog-content\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.401316 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grml9\" (UniqueName: \"kubernetes.io/projected/52f6563d-9a78-4b18-b8a8-5505d2874a84-kube-api-access-grml9\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.401363 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52f6563d-9a78-4b18-b8a8-5505d2874a84-utilities\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.401407 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52f6563d-9a78-4b18-b8a8-5505d2874a84-catalog-content\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.401917 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52f6563d-9a78-4b18-b8a8-5505d2874a84-catalog-content\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.402286 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52f6563d-9a78-4b18-b8a8-5505d2874a84-utilities\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " 
pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.420619 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grml9\" (UniqueName: \"kubernetes.io/projected/52f6563d-9a78-4b18-b8a8-5505d2874a84-kube-api-access-grml9\") pod \"redhat-operators-72xfv\" (UID: \"52f6563d-9a78-4b18-b8a8-5505d2874a84\") " pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.515318 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:27 crc kubenswrapper[4869]: I0130 11:55:27.946771 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72xfv"] Jan 30 11:55:28 crc kubenswrapper[4869]: I0130 11:55:28.824534 4869 generic.go:334] "Generic (PLEG): container finished" podID="52f6563d-9a78-4b18-b8a8-5505d2874a84" containerID="2ee4537de834d0bb3bf16d984b329edd1f78ce10067d06a9020fa5fa81e9e8bc" exitCode=0 Jan 30 11:55:28 crc kubenswrapper[4869]: I0130 11:55:28.824775 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72xfv" event={"ID":"52f6563d-9a78-4b18-b8a8-5505d2874a84","Type":"ContainerDied","Data":"2ee4537de834d0bb3bf16d984b329edd1f78ce10067d06a9020fa5fa81e9e8bc"} Jan 30 11:55:28 crc kubenswrapper[4869]: I0130 11:55:28.824831 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72xfv" event={"ID":"52f6563d-9a78-4b18-b8a8-5505d2874a84","Type":"ContainerStarted","Data":"59ac3aea8880b1aec37773baaa2b56c9d6896e320bc61c80eefa1bf963833ac2"} Jan 30 11:55:28 crc kubenswrapper[4869]: I0130 11:55:28.826793 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 11:55:35 crc kubenswrapper[4869]: I0130 11:55:35.870496 4869 generic.go:334] "Generic (PLEG): container finished" podID="52f6563d-9a78-4b18-b8a8-5505d2874a84" containerID="0336dccf27eea499ba826d3ece53a5e23d03ebe495b2f6f9d74f81deb25b5e3c" exitCode=0 Jan 30 11:55:35 crc kubenswrapper[4869]: I0130 11:55:35.870601 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72xfv" event={"ID":"52f6563d-9a78-4b18-b8a8-5505d2874a84","Type":"ContainerDied","Data":"0336dccf27eea499ba826d3ece53a5e23d03ebe495b2f6f9d74f81deb25b5e3c"} Jan 30 11:55:36 crc kubenswrapper[4869]: I0130 11:55:36.878589 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72xfv" event={"ID":"52f6563d-9a78-4b18-b8a8-5505d2874a84","Type":"ContainerStarted","Data":"df160e518a0ffc89003caf485bfa4c58e8388de2b2b06ce676213603b4a80350"} Jan 30 11:55:36 crc kubenswrapper[4869]: I0130 11:55:36.896181 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-72xfv" podStartSLOduration=2.409948957 podStartE2EDuration="9.896157446s" podCreationTimestamp="2026-01-30 11:55:27 +0000 UTC" firstStartedPulling="2026-01-30 11:55:28.826591679 +0000 UTC m=+3679.376467735" lastFinishedPulling="2026-01-30 11:55:36.312800158 +0000 UTC m=+3686.862676224" observedRunningTime="2026-01-30 11:55:36.892597164 +0000 UTC m=+3687.442473230" watchObservedRunningTime="2026-01-30 11:55:36.896157446 +0000 UTC m=+3687.446033512" Jan 30 11:55:37 crc kubenswrapper[4869]: I0130 11:55:37.516610 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:37 crc kubenswrapper[4869]: I0130 11:55:37.516664 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:38 crc kubenswrapper[4869]: I0130 11:55:38.559368 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-72xfv" podUID="52f6563d-9a78-4b18-b8a8-5505d2874a84" containerName="registry-server" probeResult="failure" output=< Jan 30 11:55:38 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Jan 30 11:55:38 crc kubenswrapper[4869]: > Jan 30 11:55:47 crc kubenswrapper[4869]: I0130 11:55:47.574462 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:47 crc kubenswrapper[4869]: I0130 11:55:47.618819 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-72xfv" Jan 30 11:55:47 crc kubenswrapper[4869]: I0130 11:55:47.687246 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72xfv"] Jan 30 11:55:47 crc kubenswrapper[4869]: I0130 11:55:47.807802 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c9rvm"] Jan 30 11:55:47 crc kubenswrapper[4869]: I0130 11:55:47.809806 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c9rvm" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="registry-server" containerID="cri-o://0404094534ee99ad1d5cb91b2b42bc6f72fa21cddcda828d63db5bffcef4e1cb" gracePeriod=2 Jan 30 11:55:47 crc kubenswrapper[4869]: I0130 11:55:47.978789 4869 generic.go:334] "Generic (PLEG): container finished" podID="51e1e87e-a127-4a53-9395-a32a304f638c" containerID="0404094534ee99ad1d5cb91b2b42bc6f72fa21cddcda828d63db5bffcef4e1cb" exitCode=0 Jan 30 11:55:47 crc kubenswrapper[4869]: I0130 11:55:47.979596 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9rvm" event={"ID":"51e1e87e-a127-4a53-9395-a32a304f638c","Type":"ContainerDied","Data":"0404094534ee99ad1d5cb91b2b42bc6f72fa21cddcda828d63db5bffcef4e1cb"} Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.259326 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.409374 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-utilities\") pod \"51e1e87e-a127-4a53-9395-a32a304f638c\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.409432 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-catalog-content\") pod \"51e1e87e-a127-4a53-9395-a32a304f638c\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.409512 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgr4x\" (UniqueName: \"kubernetes.io/projected/51e1e87e-a127-4a53-9395-a32a304f638c-kube-api-access-qgr4x\") pod \"51e1e87e-a127-4a53-9395-a32a304f638c\" (UID: \"51e1e87e-a127-4a53-9395-a32a304f638c\") " Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.413226 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-utilities" (OuterVolumeSpecName: "utilities") pod "51e1e87e-a127-4a53-9395-a32a304f638c" (UID: "51e1e87e-a127-4a53-9395-a32a304f638c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.424844 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51e1e87e-a127-4a53-9395-a32a304f638c-kube-api-access-qgr4x" (OuterVolumeSpecName: "kube-api-access-qgr4x") pod "51e1e87e-a127-4a53-9395-a32a304f638c" (UID: "51e1e87e-a127-4a53-9395-a32a304f638c"). InnerVolumeSpecName "kube-api-access-qgr4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.513957 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgr4x\" (UniqueName: \"kubernetes.io/projected/51e1e87e-a127-4a53-9395-a32a304f638c-kube-api-access-qgr4x\") on node \"crc\" DevicePath \"\"" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.513990 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.520392 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "51e1e87e-a127-4a53-9395-a32a304f638c" (UID: "51e1e87e-a127-4a53-9395-a32a304f638c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.615226 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51e1e87e-a127-4a53-9395-a32a304f638c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.988227 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9rvm" event={"ID":"51e1e87e-a127-4a53-9395-a32a304f638c","Type":"ContainerDied","Data":"74f50934ef9da057e382954f1af04e6e599df9cf048ec78c967071a00b515a55"} Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.988285 4869 scope.go:117] "RemoveContainer" containerID="0404094534ee99ad1d5cb91b2b42bc6f72fa21cddcda828d63db5bffcef4e1cb" Jan 30 11:55:48 crc kubenswrapper[4869]: I0130 11:55:48.988297 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c9rvm" Jan 30 11:55:49 crc kubenswrapper[4869]: I0130 11:55:49.007942 4869 scope.go:117] "RemoveContainer" containerID="4eb12e8adabe16b39869d8eb7420cb818bfee76072e95cad831ab4c559af0c8b" Jan 30 11:55:49 crc kubenswrapper[4869]: I0130 11:55:49.029137 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c9rvm"] Jan 30 11:55:49 crc kubenswrapper[4869]: I0130 11:55:49.033985 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c9rvm"] Jan 30 11:55:49 crc kubenswrapper[4869]: I0130 11:55:49.035517 4869 scope.go:117] "RemoveContainer" containerID="2964a733a289a747bf2abfe24efe236027989983f9ca2a89fed6f09ef2d8fbbb" Jan 30 11:55:50 crc kubenswrapper[4869]: I0130 11:55:50.143432 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" path="/var/lib/kubelet/pods/51e1e87e-a127-4a53-9395-a32a304f638c/volumes" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.417270 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m8nrw"] Jan 30 11:55:53 crc kubenswrapper[4869]: E0130 11:55:53.419026 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="extract-utilities" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.419119 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="extract-utilities" Jan 30 11:55:53 crc kubenswrapper[4869]: E0130 11:55:53.419196 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="registry-server" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.419354 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="registry-server" Jan 30 11:55:53 crc kubenswrapper[4869]: E0130 11:55:53.419427 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="extract-content" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.419496 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="extract-content" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.419807 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="51e1e87e-a127-4a53-9395-a32a304f638c" containerName="registry-server" Jan 30 11:55:53 crc 
kubenswrapper[4869]: I0130 11:55:53.423204 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.442926 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8nrw"] Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.583380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-catalog-content\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.583482 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-utilities\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.583509 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p279q\" (UniqueName: \"kubernetes.io/projected/f048bb0b-244b-4e61-84e7-f94d91c8da40-kube-api-access-p279q\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.685494 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-utilities\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.685576 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p279q\" (UniqueName: \"kubernetes.io/projected/f048bb0b-244b-4e61-84e7-f94d91c8da40-kube-api-access-p279q\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.685663 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-catalog-content\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.686348 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-catalog-content\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.686778 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-utilities\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc 
kubenswrapper[4869]: I0130 11:55:53.711104 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p279q\" (UniqueName: \"kubernetes.io/projected/f048bb0b-244b-4e61-84e7-f94d91c8da40-kube-api-access-p279q\") pod \"redhat-marketplace-m8nrw\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:53 crc kubenswrapper[4869]: I0130 11:55:53.749599 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:55:54 crc kubenswrapper[4869]: I0130 11:55:54.208036 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8nrw"] Jan 30 11:55:54 crc kubenswrapper[4869]: W0130 11:55:54.209468 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf048bb0b_244b_4e61_84e7_f94d91c8da40.slice/crio-64595b23a32f65911a177c2623fe6a82ac49c449e4e7bb44b2e1f8427768f441 WatchSource:0}: Error finding container 64595b23a32f65911a177c2623fe6a82ac49c449e4e7bb44b2e1f8427768f441: Status 404 returned error can't find the container with id 64595b23a32f65911a177c2623fe6a82ac49c449e4e7bb44b2e1f8427768f441 Jan 30 11:55:55 crc kubenswrapper[4869]: I0130 11:55:55.041862 4869 generic.go:334] "Generic (PLEG): container finished" podID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerID="31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255" exitCode=0 Jan 30 11:55:55 crc kubenswrapper[4869]: I0130 11:55:55.041936 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8nrw" event={"ID":"f048bb0b-244b-4e61-84e7-f94d91c8da40","Type":"ContainerDied","Data":"31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255"} Jan 30 11:55:55 crc kubenswrapper[4869]: I0130 11:55:55.042046 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8nrw" event={"ID":"f048bb0b-244b-4e61-84e7-f94d91c8da40","Type":"ContainerStarted","Data":"64595b23a32f65911a177c2623fe6a82ac49c449e4e7bb44b2e1f8427768f441"} Jan 30 11:55:56 crc kubenswrapper[4869]: I0130 11:55:56.050341 4869 generic.go:334] "Generic (PLEG): container finished" podID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerID="68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8" exitCode=0 Jan 30 11:55:56 crc kubenswrapper[4869]: I0130 11:55:56.050450 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8nrw" event={"ID":"f048bb0b-244b-4e61-84e7-f94d91c8da40","Type":"ContainerDied","Data":"68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8"} Jan 30 11:55:57 crc kubenswrapper[4869]: I0130 11:55:57.059484 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8nrw" event={"ID":"f048bb0b-244b-4e61-84e7-f94d91c8da40","Type":"ContainerStarted","Data":"9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e"} Jan 30 11:55:57 crc kubenswrapper[4869]: I0130 11:55:57.079531 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m8nrw" podStartSLOduration=2.621253373 podStartE2EDuration="4.079510537s" podCreationTimestamp="2026-01-30 11:55:53 +0000 UTC" firstStartedPulling="2026-01-30 11:55:55.044425021 +0000 UTC m=+3705.594301087" lastFinishedPulling="2026-01-30 11:55:56.502682185 +0000 UTC m=+3707.052558251" 
observedRunningTime="2026-01-30 11:55:57.077581312 +0000 UTC m=+3707.627457388" watchObservedRunningTime="2026-01-30 11:55:57.079510537 +0000 UTC m=+3707.629386603" Jan 30 11:56:03 crc kubenswrapper[4869]: I0130 11:56:03.750337 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:56:03 crc kubenswrapper[4869]: I0130 11:56:03.750926 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:56:03 crc kubenswrapper[4869]: I0130 11:56:03.794921 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:56:04 crc kubenswrapper[4869]: I0130 11:56:04.160334 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:56:04 crc kubenswrapper[4869]: I0130 11:56:04.213740 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8nrw"] Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.127796 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m8nrw" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="registry-server" containerID="cri-o://9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e" gracePeriod=2 Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.499124 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.680161 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-catalog-content\") pod \"f048bb0b-244b-4e61-84e7-f94d91c8da40\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.680250 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-utilities\") pod \"f048bb0b-244b-4e61-84e7-f94d91c8da40\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.680302 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p279q\" (UniqueName: \"kubernetes.io/projected/f048bb0b-244b-4e61-84e7-f94d91c8da40-kube-api-access-p279q\") pod \"f048bb0b-244b-4e61-84e7-f94d91c8da40\" (UID: \"f048bb0b-244b-4e61-84e7-f94d91c8da40\") " Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.681891 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-utilities" (OuterVolumeSpecName: "utilities") pod "f048bb0b-244b-4e61-84e7-f94d91c8da40" (UID: "f048bb0b-244b-4e61-84e7-f94d91c8da40"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.686410 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f048bb0b-244b-4e61-84e7-f94d91c8da40-kube-api-access-p279q" (OuterVolumeSpecName: "kube-api-access-p279q") pod "f048bb0b-244b-4e61-84e7-f94d91c8da40" (UID: "f048bb0b-244b-4e61-84e7-f94d91c8da40"). 
InnerVolumeSpecName "kube-api-access-p279q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.705694 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f048bb0b-244b-4e61-84e7-f94d91c8da40" (UID: "f048bb0b-244b-4e61-84e7-f94d91c8da40"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.782364 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.782599 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f048bb0b-244b-4e61-84e7-f94d91c8da40-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 11:56:06 crc kubenswrapper[4869]: I0130 11:56:06.782680 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p279q\" (UniqueName: \"kubernetes.io/projected/f048bb0b-244b-4e61-84e7-f94d91c8da40-kube-api-access-p279q\") on node \"crc\" DevicePath \"\"" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.136517 4869 generic.go:334] "Generic (PLEG): container finished" podID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerID="9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e" exitCode=0 Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.136575 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8nrw" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.136585 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8nrw" event={"ID":"f048bb0b-244b-4e61-84e7-f94d91c8da40","Type":"ContainerDied","Data":"9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e"} Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.137380 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8nrw" event={"ID":"f048bb0b-244b-4e61-84e7-f94d91c8da40","Type":"ContainerDied","Data":"64595b23a32f65911a177c2623fe6a82ac49c449e4e7bb44b2e1f8427768f441"} Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.137402 4869 scope.go:117] "RemoveContainer" containerID="9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.154325 4869 scope.go:117] "RemoveContainer" containerID="68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.170079 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8nrw"] Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.178113 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8nrw"] Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.185081 4869 scope.go:117] "RemoveContainer" containerID="31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.201205 4869 scope.go:117] "RemoveContainer" containerID="9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e" Jan 30 11:56:07 crc kubenswrapper[4869]: 
E0130 11:56:07.201699 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e\": container with ID starting with 9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e not found: ID does not exist" containerID="9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.201762 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e"} err="failed to get container status \"9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e\": rpc error: code = NotFound desc = could not find container \"9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e\": container with ID starting with 9ed6f1bcdb5c49d6b217837251e3dcfd42c2326811eb1fa910c29cadaaaf137e not found: ID does not exist" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.201792 4869 scope.go:117] "RemoveContainer" containerID="68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8" Jan 30 11:56:07 crc kubenswrapper[4869]: E0130 11:56:07.202236 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8\": container with ID starting with 68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8 not found: ID does not exist" containerID="68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.202267 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8"} err="failed to get container status \"68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8\": rpc error: code = NotFound desc = could not find container \"68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8\": container with ID starting with 68714ce457f72fd6a09bad237129e635243b08ef7d02128699c0462c88a2a5e8 not found: ID does not exist" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.202291 4869 scope.go:117] "RemoveContainer" containerID="31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255" Jan 30 11:56:07 crc kubenswrapper[4869]: E0130 11:56:07.202567 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255\": container with ID starting with 31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255 not found: ID does not exist" containerID="31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255" Jan 30 11:56:07 crc kubenswrapper[4869]: I0130 11:56:07.202593 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255"} err="failed to get container status \"31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255\": rpc error: code = NotFound desc = could not find container \"31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255\": container with ID starting with 31751c1e0cc305f4a7c72fa554178619f6d2c8f81cdd7b2e5c4e4e4564cb9255 not found: ID does not exist" Jan 30 11:56:08 crc kubenswrapper[4869]: I0130 11:56:08.144034 
4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" path="/var/lib/kubelet/pods/f048bb0b-244b-4e61-84e7-f94d91c8da40/volumes" Jan 30 11:57:21 crc kubenswrapper[4869]: I0130 11:57:21.769112 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:57:21 crc kubenswrapper[4869]: I0130 11:57:21.769672 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:57:51 crc kubenswrapper[4869]: I0130 11:57:51.768914 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:57:51 crc kubenswrapper[4869]: I0130 11:57:51.769555 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:58:15 crc kubenswrapper[4869]: I0130 11:58:15.381023 4869 scope.go:117] "RemoveContainer" containerID="f67014bbda714be3d1caf9ea5c807566725b1f75d1c36f1f57e8d8c943362f7f" Jan 30 11:58:15 crc kubenswrapper[4869]: I0130 11:58:15.415165 4869 scope.go:117] "RemoveContainer" containerID="cffc677019baf9fb7831155ea1aab3c2487b4634f336efbf76a7f1d5aac06305" Jan 30 11:58:15 crc kubenswrapper[4869]: I0130 11:58:15.432643 4869 scope.go:117] "RemoveContainer" containerID="d79db0f14c2e20f79f5223e45b991628b70b75bbb4a3043c23b68d6cc4286f40" Jan 30 11:58:21 crc kubenswrapper[4869]: I0130 11:58:21.769659 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 11:58:21 crc kubenswrapper[4869]: I0130 11:58:21.770224 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 11:58:21 crc kubenswrapper[4869]: I0130 11:58:21.770292 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 11:58:21 crc kubenswrapper[4869]: I0130 11:58:21.772320 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Jan 30 11:58:21 crc kubenswrapper[4869]: I0130 11:58:21.772390 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" gracePeriod=600 Jan 30 11:58:22 crc kubenswrapper[4869]: I0130 11:58:22.013982 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" exitCode=0 Jan 30 11:58:22 crc kubenswrapper[4869]: I0130 11:58:22.014027 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"} Jan 30 11:58:22 crc kubenswrapper[4869]: I0130 11:58:22.014058 4869 scope.go:117] "RemoveContainer" containerID="94f570b1b967af4f8ae6075c92ac563b5379a48c31b5c2c0c62bb8a7c2c22978" Jan 30 11:58:22 crc kubenswrapper[4869]: E0130 11:58:22.477059 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:58:23 crc kubenswrapper[4869]: I0130 11:58:23.024268 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:58:23 crc kubenswrapper[4869]: E0130 11:58:23.024505 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:58:36 crc kubenswrapper[4869]: I0130 11:58:36.133679 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:58:36 crc kubenswrapper[4869]: E0130 11:58:36.134476 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:58:51 crc kubenswrapper[4869]: I0130 11:58:51.132572 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:58:51 crc kubenswrapper[4869]: E0130 11:58:51.133347 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:59:02 crc kubenswrapper[4869]: I0130 11:59:02.134026 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:59:02 crc kubenswrapper[4869]: E0130 11:59:02.134785 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:59:14 crc kubenswrapper[4869]: I0130 11:59:14.133177 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:59:14 crc kubenswrapper[4869]: E0130 11:59:14.134408 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:59:27 crc kubenswrapper[4869]: I0130 11:59:27.133660 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:59:27 crc kubenswrapper[4869]: E0130 11:59:27.134588 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:59:40 crc kubenswrapper[4869]: I0130 11:59:40.137091 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:59:40 crc kubenswrapper[4869]: E0130 11:59:40.137743 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:59:52 crc kubenswrapper[4869]: I0130 11:59:52.133639 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 11:59:52 crc kubenswrapper[4869]: E0130 11:59:52.134383 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" 
podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.797286 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m77kh"] Jan 30 11:59:54 crc kubenswrapper[4869]: E0130 11:59:54.797991 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="extract-utilities" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.798010 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="extract-utilities" Jan 30 11:59:54 crc kubenswrapper[4869]: E0130 11:59:54.798049 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="registry-server" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.798060 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="registry-server" Jan 30 11:59:54 crc kubenswrapper[4869]: E0130 11:59:54.798074 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="extract-content" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.798083 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="extract-content" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.798272 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f048bb0b-244b-4e61-84e7-f94d91c8da40" containerName="registry-server" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.799586 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.817544 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m77kh"] Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.984227 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-catalog-content\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.984334 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kjgp\" (UniqueName: \"kubernetes.io/projected/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-kube-api-access-4kjgp\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:54 crc kubenswrapper[4869]: I0130 11:59:54.985001 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-utilities\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.086457 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-utilities\") pod \"community-operators-m77kh\" (UID: 
\"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.086543 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-catalog-content\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.086614 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kjgp\" (UniqueName: \"kubernetes.io/projected/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-kube-api-access-4kjgp\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.087306 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-utilities\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.087338 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-catalog-content\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.110127 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kjgp\" (UniqueName: \"kubernetes.io/projected/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-kube-api-access-4kjgp\") pod \"community-operators-m77kh\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.122989 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m77kh" Jan 30 11:59:55 crc kubenswrapper[4869]: I0130 11:59:55.898387 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m77kh"] Jan 30 11:59:56 crc kubenswrapper[4869]: I0130 11:59:56.631801 4869 generic.go:334] "Generic (PLEG): container finished" podID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerID="3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16" exitCode=0 Jan 30 11:59:56 crc kubenswrapper[4869]: I0130 11:59:56.631941 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m77kh" event={"ID":"adb25aee-fe09-4ad1-85fc-cc9e396ceaed","Type":"ContainerDied","Data":"3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16"} Jan 30 11:59:56 crc kubenswrapper[4869]: I0130 11:59:56.632335 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m77kh" event={"ID":"adb25aee-fe09-4ad1-85fc-cc9e396ceaed","Type":"ContainerStarted","Data":"eda73227d39d3046471dc466482c118dd2c75f4517964e2a248c469ddfdcfd99"} Jan 30 11:59:57 crc kubenswrapper[4869]: I0130 11:59:57.641227 4869 generic.go:334] "Generic (PLEG): container finished" podID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerID="594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647" exitCode=0 Jan 30 11:59:57 crc kubenswrapper[4869]: I0130 11:59:57.641298 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m77kh" event={"ID":"adb25aee-fe09-4ad1-85fc-cc9e396ceaed","Type":"ContainerDied","Data":"594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647"} Jan 30 11:59:58 crc kubenswrapper[4869]: I0130 11:59:58.649769 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m77kh" event={"ID":"adb25aee-fe09-4ad1-85fc-cc9e396ceaed","Type":"ContainerStarted","Data":"84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b"} Jan 30 11:59:58 crc kubenswrapper[4869]: I0130 11:59:58.675518 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m77kh" podStartSLOduration=3.166189299 podStartE2EDuration="4.675496405s" podCreationTimestamp="2026-01-30 11:59:54 +0000 UTC" firstStartedPulling="2026-01-30 11:59:56.633981491 +0000 UTC m=+3947.183857557" lastFinishedPulling="2026-01-30 11:59:58.143288597 +0000 UTC m=+3948.693164663" observedRunningTime="2026-01-30 11:59:58.669570377 +0000 UTC m=+3949.219446443" watchObservedRunningTime="2026-01-30 11:59:58.675496405 +0000 UTC m=+3949.225372471" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.167766 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg"] Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.168924 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.170984 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.176819 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg"] Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.177626 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bf49b3f-5c47-44e5-948b-6188aa800ce9-config-volume\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.177672 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mc96s\" (UniqueName: \"kubernetes.io/projected/7bf49b3f-5c47-44e5-948b-6188aa800ce9-kube-api-access-mc96s\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.177700 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bf49b3f-5c47-44e5-948b-6188aa800ce9-secret-volume\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.179003 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.278834 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bf49b3f-5c47-44e5-948b-6188aa800ce9-config-volume\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.278900 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mc96s\" (UniqueName: \"kubernetes.io/projected/7bf49b3f-5c47-44e5-948b-6188aa800ce9-kube-api-access-mc96s\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.278928 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bf49b3f-5c47-44e5-948b-6188aa800ce9-secret-volume\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.280066 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bf49b3f-5c47-44e5-948b-6188aa800ce9-config-volume\") pod 
\"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.284749 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bf49b3f-5c47-44e5-948b-6188aa800ce9-secret-volume\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.301499 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mc96s\" (UniqueName: \"kubernetes.io/projected/7bf49b3f-5c47-44e5-948b-6188aa800ce9-kube-api-access-mc96s\") pod \"collect-profiles-29496240-m2zpg\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.498655 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:00 crc kubenswrapper[4869]: I0130 12:00:00.902546 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg"] Jan 30 12:00:00 crc kubenswrapper[4869]: W0130 12:00:00.908504 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bf49b3f_5c47_44e5_948b_6188aa800ce9.slice/crio-39ac213e4a617b52e9642319567dab71acc140f652988e8b84d7ae55813e205a WatchSource:0}: Error finding container 39ac213e4a617b52e9642319567dab71acc140f652988e8b84d7ae55813e205a: Status 404 returned error can't find the container with id 39ac213e4a617b52e9642319567dab71acc140f652988e8b84d7ae55813e205a Jan 30 12:00:01 crc kubenswrapper[4869]: I0130 12:00:01.674641 4869 generic.go:334] "Generic (PLEG): container finished" podID="7bf49b3f-5c47-44e5-948b-6188aa800ce9" containerID="c83745970ce69ea4b2b2a52cee8b940164719c0be6d0c21f684983de8c6a8d43" exitCode=0 Jan 30 12:00:01 crc kubenswrapper[4869]: I0130 12:00:01.674747 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" event={"ID":"7bf49b3f-5c47-44e5-948b-6188aa800ce9","Type":"ContainerDied","Data":"c83745970ce69ea4b2b2a52cee8b940164719c0be6d0c21f684983de8c6a8d43"} Jan 30 12:00:01 crc kubenswrapper[4869]: I0130 12:00:01.675137 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" event={"ID":"7bf49b3f-5c47-44e5-948b-6188aa800ce9","Type":"ContainerStarted","Data":"39ac213e4a617b52e9642319567dab71acc140f652988e8b84d7ae55813e205a"} Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.082516 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.127304 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bf49b3f-5c47-44e5-948b-6188aa800ce9-config-volume\") pod \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.127361 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mc96s\" (UniqueName: \"kubernetes.io/projected/7bf49b3f-5c47-44e5-948b-6188aa800ce9-kube-api-access-mc96s\") pod \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.127412 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bf49b3f-5c47-44e5-948b-6188aa800ce9-secret-volume\") pod \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\" (UID: \"7bf49b3f-5c47-44e5-948b-6188aa800ce9\") " Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.128258 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bf49b3f-5c47-44e5-948b-6188aa800ce9-config-volume" (OuterVolumeSpecName: "config-volume") pod "7bf49b3f-5c47-44e5-948b-6188aa800ce9" (UID: "7bf49b3f-5c47-44e5-948b-6188aa800ce9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.132580 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bf49b3f-5c47-44e5-948b-6188aa800ce9-kube-api-access-mc96s" (OuterVolumeSpecName: "kube-api-access-mc96s") pod "7bf49b3f-5c47-44e5-948b-6188aa800ce9" (UID: "7bf49b3f-5c47-44e5-948b-6188aa800ce9"). InnerVolumeSpecName "kube-api-access-mc96s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.132691 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf49b3f-5c47-44e5-948b-6188aa800ce9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7bf49b3f-5c47-44e5-948b-6188aa800ce9" (UID: "7bf49b3f-5c47-44e5-948b-6188aa800ce9"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.228854 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bf49b3f-5c47-44e5-948b-6188aa800ce9-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.229186 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mc96s\" (UniqueName: \"kubernetes.io/projected/7bf49b3f-5c47-44e5-948b-6188aa800ce9-kube-api-access-mc96s\") on node \"crc\" DevicePath \"\"" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.229414 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bf49b3f-5c47-44e5-948b-6188aa800ce9-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.690646 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" event={"ID":"7bf49b3f-5c47-44e5-948b-6188aa800ce9","Type":"ContainerDied","Data":"39ac213e4a617b52e9642319567dab71acc140f652988e8b84d7ae55813e205a"} Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.690688 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39ac213e4a617b52e9642319567dab71acc140f652988e8b84d7ae55813e205a" Jan 30 12:00:03 crc kubenswrapper[4869]: I0130 12:00:03.690741 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496240-m2zpg" Jan 30 12:00:04 crc kubenswrapper[4869]: I0130 12:00:04.166758 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c"] Jan 30 12:00:04 crc kubenswrapper[4869]: I0130 12:00:04.173434 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496195-7ln2c"] Jan 30 12:00:05 crc kubenswrapper[4869]: I0130 12:00:05.123870 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m77kh" Jan 30 12:00:05 crc kubenswrapper[4869]: I0130 12:00:05.123925 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m77kh" Jan 30 12:00:05 crc kubenswrapper[4869]: I0130 12:00:05.167900 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m77kh" Jan 30 12:00:05 crc kubenswrapper[4869]: I0130 12:00:05.748951 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m77kh" Jan 30 12:00:05 crc kubenswrapper[4869]: I0130 12:00:05.792237 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m77kh"] Jan 30 12:00:06 crc kubenswrapper[4869]: I0130 12:00:06.141261 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b" path="/var/lib/kubelet/pods/e9feb67d-ce15-4b08-8e6d-ad0e3ddbea8b/volumes" Jan 30 12:00:07 crc kubenswrapper[4869]: I0130 12:00:07.132640 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:00:07 crc kubenswrapper[4869]: E0130 12:00:07.133207 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:00:07 crc kubenswrapper[4869]: I0130 12:00:07.715367 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m77kh" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="registry-server" containerID="cri-o://84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b" gracePeriod=2 Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.152483 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m77kh" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.191048 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-catalog-content\") pod \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.191134 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kjgp\" (UniqueName: \"kubernetes.io/projected/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-kube-api-access-4kjgp\") pod \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.191192 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-utilities\") pod \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\" (UID: \"adb25aee-fe09-4ad1-85fc-cc9e396ceaed\") " Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.192199 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-utilities" (OuterVolumeSpecName: "utilities") pod "adb25aee-fe09-4ad1-85fc-cc9e396ceaed" (UID: "adb25aee-fe09-4ad1-85fc-cc9e396ceaed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.197043 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-kube-api-access-4kjgp" (OuterVolumeSpecName: "kube-api-access-4kjgp") pod "adb25aee-fe09-4ad1-85fc-cc9e396ceaed" (UID: "adb25aee-fe09-4ad1-85fc-cc9e396ceaed"). InnerVolumeSpecName "kube-api-access-4kjgp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.292365 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kjgp\" (UniqueName: \"kubernetes.io/projected/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-kube-api-access-4kjgp\") on node \"crc\" DevicePath \"\"" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.292397 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.337150 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "adb25aee-fe09-4ad1-85fc-cc9e396ceaed" (UID: "adb25aee-fe09-4ad1-85fc-cc9e396ceaed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.394150 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adb25aee-fe09-4ad1-85fc-cc9e396ceaed-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.725627 4869 generic.go:334] "Generic (PLEG): container finished" podID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerID="84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b" exitCode=0 Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.725698 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m77kh" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.725693 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m77kh" event={"ID":"adb25aee-fe09-4ad1-85fc-cc9e396ceaed","Type":"ContainerDied","Data":"84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b"} Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.725792 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m77kh" event={"ID":"adb25aee-fe09-4ad1-85fc-cc9e396ceaed","Type":"ContainerDied","Data":"eda73227d39d3046471dc466482c118dd2c75f4517964e2a248c469ddfdcfd99"} Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.725848 4869 scope.go:117] "RemoveContainer" containerID="84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.742754 4869 scope.go:117] "RemoveContainer" containerID="594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.761066 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m77kh"] Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.767601 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m77kh"] Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.869290 4869 scope.go:117] "RemoveContainer" containerID="3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.991039 4869 scope.go:117] "RemoveContainer" containerID="84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b" Jan 30 12:00:08 crc kubenswrapper[4869]: E0130 12:00:08.991586 4869 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b\": container with ID starting with 84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b not found: ID does not exist" containerID="84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.991627 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b"} err="failed to get container status \"84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b\": rpc error: code = NotFound desc = could not find container \"84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b\": container with ID starting with 84682592e640944d473e9bb9317e73dbf5d7c59c370ce44effffa6d44933214b not found: ID does not exist" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.991656 4869 scope.go:117] "RemoveContainer" containerID="594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647" Jan 30 12:00:08 crc kubenswrapper[4869]: E0130 12:00:08.992058 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647\": container with ID starting with 594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647 not found: ID does not exist" containerID="594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.992092 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647"} err="failed to get container status \"594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647\": rpc error: code = NotFound desc = could not find container \"594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647\": container with ID starting with 594f5abcf52bee1468e22bd95e2abceda53c9eb94ea140e942e710e944c04647 not found: ID does not exist" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.992109 4869 scope.go:117] "RemoveContainer" containerID="3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16" Jan 30 12:00:08 crc kubenswrapper[4869]: E0130 12:00:08.992960 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16\": container with ID starting with 3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16 not found: ID does not exist" containerID="3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16" Jan 30 12:00:08 crc kubenswrapper[4869]: I0130 12:00:08.992999 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16"} err="failed to get container status \"3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16\": rpc error: code = NotFound desc = could not find container \"3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16\": container with ID starting with 3de7e899541cfc8eb9d4308baf71cf580d9242261f7f8a6e5c830dc36a473f16 not found: ID does not exist" Jan 30 12:00:10 crc kubenswrapper[4869]: I0130 12:00:10.148451 4869 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" path="/var/lib/kubelet/pods/adb25aee-fe09-4ad1-85fc-cc9e396ceaed/volumes" Jan 30 12:00:15 crc kubenswrapper[4869]: I0130 12:00:15.477134 4869 scope.go:117] "RemoveContainer" containerID="e728007b89aeb0f94800a4be49a30a3fe81b587263865831d35850b63d9849f0" Jan 30 12:00:18 crc kubenswrapper[4869]: I0130 12:00:18.133476 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:00:18 crc kubenswrapper[4869]: E0130 12:00:18.134075 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:00:31 crc kubenswrapper[4869]: I0130 12:00:31.133131 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:00:31 crc kubenswrapper[4869]: E0130 12:00:31.133988 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:00:45 crc kubenswrapper[4869]: I0130 12:00:45.134337 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:00:45 crc kubenswrapper[4869]: E0130 12:00:45.135690 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:00:57 crc kubenswrapper[4869]: I0130 12:00:57.133855 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:00:57 crc kubenswrapper[4869]: E0130 12:00:57.134625 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:01:12 crc kubenswrapper[4869]: I0130 12:01:12.133068 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:01:12 crc kubenswrapper[4869]: E0130 12:01:12.133836 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 30 12:01:27 crc kubenswrapper[4869]: I0130 12:01:27.133411 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
Jan 30 12:01:27 crc kubenswrapper[4869]: E0130 12:01:27.135758 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:01:42 crc kubenswrapper[4869]: I0130 12:01:42.133088 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
Jan 30 12:01:42 crc kubenswrapper[4869]: E0130 12:01:42.133782 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:01:54 crc kubenswrapper[4869]: I0130 12:01:54.133013 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
Jan 30 12:01:54 crc kubenswrapper[4869]: E0130 12:01:54.133813 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:02:09 crc kubenswrapper[4869]: I0130 12:02:09.133000 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
Jan 30 12:02:09 crc kubenswrapper[4869]: E0130 12:02:09.133687 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:02:23 crc kubenswrapper[4869]: I0130 12:02:23.132956 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
Jan 30 12:02:23 crc kubenswrapper[4869]: E0130 12:02:23.133611 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:02:35 crc kubenswrapper[4869]: I0130 12:02:35.133866 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:02:35 crc kubenswrapper[4869]: E0130 12:02:35.134549 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.517340 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mjp2g"] Jan 30 12:02:40 crc kubenswrapper[4869]: E0130 12:02:40.518271 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="extract-utilities" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.518285 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="extract-utilities" Jan 30 12:02:40 crc kubenswrapper[4869]: E0130 12:02:40.518308 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="registry-server" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.518317 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="registry-server" Jan 30 12:02:40 crc kubenswrapper[4869]: E0130 12:02:40.518327 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="extract-content" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.518334 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="extract-content" Jan 30 12:02:40 crc kubenswrapper[4869]: E0130 12:02:40.518355 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bf49b3f-5c47-44e5-948b-6188aa800ce9" containerName="collect-profiles" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.518361 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bf49b3f-5c47-44e5-948b-6188aa800ce9" containerName="collect-profiles" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.518500 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="adb25aee-fe09-4ad1-85fc-cc9e396ceaed" containerName="registry-server" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.518530 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bf49b3f-5c47-44e5-948b-6188aa800ce9" containerName="collect-profiles" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.519592 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.527363 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mjp2g"] Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.582751 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdl4w\" (UniqueName: \"kubernetes.io/projected/592d085a-ad90-48d1-a872-7b4b2adf300c-kube-api-access-jdl4w\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.582807 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-catalog-content\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.582839 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-utilities\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.683739 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdl4w\" (UniqueName: \"kubernetes.io/projected/592d085a-ad90-48d1-a872-7b4b2adf300c-kube-api-access-jdl4w\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.683799 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-catalog-content\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.683838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-utilities\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.684345 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-utilities\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.684476 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-catalog-content\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.702809 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jdl4w\" (UniqueName: \"kubernetes.io/projected/592d085a-ad90-48d1-a872-7b4b2adf300c-kube-api-access-jdl4w\") pod \"certified-operators-mjp2g\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:40 crc kubenswrapper[4869]: I0130 12:02:40.879095 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:41 crc kubenswrapper[4869]: I0130 12:02:41.153880 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mjp2g"] Jan 30 12:02:41 crc kubenswrapper[4869]: I0130 12:02:41.759230 4869 generic.go:334] "Generic (PLEG): container finished" podID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerID="da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c" exitCode=0 Jan 30 12:02:41 crc kubenswrapper[4869]: I0130 12:02:41.759283 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjp2g" event={"ID":"592d085a-ad90-48d1-a872-7b4b2adf300c","Type":"ContainerDied","Data":"da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c"} Jan 30 12:02:41 crc kubenswrapper[4869]: I0130 12:02:41.759316 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjp2g" event={"ID":"592d085a-ad90-48d1-a872-7b4b2adf300c","Type":"ContainerStarted","Data":"dc35dd8dce52267806e70693c8305e428492b607e6c498a46af7cc5cd31125cf"} Jan 30 12:02:41 crc kubenswrapper[4869]: I0130 12:02:41.764322 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 12:02:42 crc kubenswrapper[4869]: I0130 12:02:42.767734 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjp2g" event={"ID":"592d085a-ad90-48d1-a872-7b4b2adf300c","Type":"ContainerStarted","Data":"95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492"} Jan 30 12:02:43 crc kubenswrapper[4869]: I0130 12:02:43.779050 4869 generic.go:334] "Generic (PLEG): container finished" podID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerID="95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492" exitCode=0 Jan 30 12:02:43 crc kubenswrapper[4869]: I0130 12:02:43.779096 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjp2g" event={"ID":"592d085a-ad90-48d1-a872-7b4b2adf300c","Type":"ContainerDied","Data":"95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492"} Jan 30 12:02:44 crc kubenswrapper[4869]: I0130 12:02:44.787892 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjp2g" event={"ID":"592d085a-ad90-48d1-a872-7b4b2adf300c","Type":"ContainerStarted","Data":"423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174"} Jan 30 12:02:44 crc kubenswrapper[4869]: I0130 12:02:44.812522 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mjp2g" podStartSLOduration=2.431050792 podStartE2EDuration="4.812503293s" podCreationTimestamp="2026-01-30 12:02:40 +0000 UTC" firstStartedPulling="2026-01-30 12:02:41.76402353 +0000 UTC m=+4112.313899596" lastFinishedPulling="2026-01-30 12:02:44.145476031 +0000 UTC m=+4114.695352097" observedRunningTime="2026-01-30 12:02:44.809307253 +0000 UTC m=+4115.359183319" watchObservedRunningTime="2026-01-30 
Jan 30 12:02:50 crc kubenswrapper[4869]: I0130 12:02:50.136930 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
Jan 30 12:02:50 crc kubenswrapper[4869]: E0130 12:02:50.137449 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:02:50 crc kubenswrapper[4869]: I0130 12:02:50.879758 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mjp2g"
Jan 30 12:02:50 crc kubenswrapper[4869]: I0130 12:02:50.879823 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mjp2g"
Jan 30 12:02:50 crc kubenswrapper[4869]: I0130 12:02:50.920298 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mjp2g"
Jan 30 12:02:51 crc kubenswrapper[4869]: I0130 12:02:51.875814 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mjp2g"
Jan 30 12:02:51 crc kubenswrapper[4869]: I0130 12:02:51.927191 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mjp2g"]
Jan 30 12:02:53 crc kubenswrapper[4869]: I0130 12:02:53.844651 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mjp2g" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="registry-server" containerID="cri-o://423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174" gracePeriod=2
Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.253471 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjp2g"
Need to start a new one" pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.310474 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-utilities\") pod \"592d085a-ad90-48d1-a872-7b4b2adf300c\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.310755 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdl4w\" (UniqueName: \"kubernetes.io/projected/592d085a-ad90-48d1-a872-7b4b2adf300c-kube-api-access-jdl4w\") pod \"592d085a-ad90-48d1-a872-7b4b2adf300c\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.310835 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-catalog-content\") pod \"592d085a-ad90-48d1-a872-7b4b2adf300c\" (UID: \"592d085a-ad90-48d1-a872-7b4b2adf300c\") " Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.312367 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-utilities" (OuterVolumeSpecName: "utilities") pod "592d085a-ad90-48d1-a872-7b4b2adf300c" (UID: "592d085a-ad90-48d1-a872-7b4b2adf300c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.319680 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/592d085a-ad90-48d1-a872-7b4b2adf300c-kube-api-access-jdl4w" (OuterVolumeSpecName: "kube-api-access-jdl4w") pod "592d085a-ad90-48d1-a872-7b4b2adf300c" (UID: "592d085a-ad90-48d1-a872-7b4b2adf300c"). InnerVolumeSpecName "kube-api-access-jdl4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.332919 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdl4w\" (UniqueName: \"kubernetes.io/projected/592d085a-ad90-48d1-a872-7b4b2adf300c-kube-api-access-jdl4w\") on node \"crc\" DevicePath \"\"" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.332967 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.364445 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "592d085a-ad90-48d1-a872-7b4b2adf300c" (UID: "592d085a-ad90-48d1-a872-7b4b2adf300c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.434093 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/592d085a-ad90-48d1-a872-7b4b2adf300c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.852842 4869 generic.go:334] "Generic (PLEG): container finished" podID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerID="423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174" exitCode=0 Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.852881 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjp2g" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.852905 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjp2g" event={"ID":"592d085a-ad90-48d1-a872-7b4b2adf300c","Type":"ContainerDied","Data":"423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174"} Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.852944 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjp2g" event={"ID":"592d085a-ad90-48d1-a872-7b4b2adf300c","Type":"ContainerDied","Data":"dc35dd8dce52267806e70693c8305e428492b607e6c498a46af7cc5cd31125cf"} Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.852962 4869 scope.go:117] "RemoveContainer" containerID="423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.871195 4869 scope.go:117] "RemoveContainer" containerID="95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.893033 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mjp2g"] Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.901941 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mjp2g"] Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.912760 4869 scope.go:117] "RemoveContainer" containerID="da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.927239 4869 scope.go:117] "RemoveContainer" containerID="423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174" Jan 30 12:02:54 crc kubenswrapper[4869]: E0130 12:02:54.927634 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174\": container with ID starting with 423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174 not found: ID does not exist" containerID="423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174" Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.927672 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174"} err="failed to get container status \"423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174\": rpc error: code = NotFound desc = could not find container \"423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174\": container with ID starting with 423210401ee33f716ba54c1b9207e4bd84a3a4b53926c6413fd5ef5084c4b174 not found: ID does not exist" Jan 30 
Jan 30 12:02:54 crc kubenswrapper[4869]: E0130 12:02:54.928071 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492\": container with ID starting with 95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492 not found: ID does not exist" containerID="95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492"
Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.928117 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492"} err="failed to get container status \"95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492\": rpc error: code = NotFound desc = could not find container \"95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492\": container with ID starting with 95a44020d8ac7463a6ba7c217dc128004ab720230a22b7d1356a69f7de107492 not found: ID does not exist"
Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.928149 4869 scope.go:117] "RemoveContainer" containerID="da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c"
Jan 30 12:02:54 crc kubenswrapper[4869]: E0130 12:02:54.928413 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c\": container with ID starting with da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c not found: ID does not exist" containerID="da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c"
Jan 30 12:02:54 crc kubenswrapper[4869]: I0130 12:02:54.928436 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c"} err="failed to get container status \"da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c\": rpc error: code = NotFound desc = could not find container \"da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c\": container with ID starting with da6ea7a4e5d1a42e57190a164d0d9f91a4f4a04f1200854d95d996479bf3f22c not found: ID does not exist"
Jan 30 12:02:56 crc kubenswrapper[4869]: I0130 12:02:56.141688 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" path="/var/lib/kubelet/pods/592d085a-ad90-48d1-a872-7b4b2adf300c/volumes"
Jan 30 12:03:01 crc kubenswrapper[4869]: I0130 12:03:01.133959 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
Jan 30 12:03:01 crc kubenswrapper[4869]: E0130 12:03:01.134686 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:03:15 crc kubenswrapper[4869]: I0130 12:03:15.133530 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4"
containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:03:15 crc kubenswrapper[4869]: E0130 12:03:15.135440 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:03:30 crc kubenswrapper[4869]: I0130 12:03:30.137323 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:03:31 crc kubenswrapper[4869]: I0130 12:03:31.088798 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"f3c0492f240ef8079c5a536a55c71f07e73380e7d54376f9d12ab4006ba8c385"} Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.627545 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4rhwd"] Jan 30 12:05:46 crc kubenswrapper[4869]: E0130 12:05:46.628321 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="extract-content" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.628332 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="extract-content" Jan 30 12:05:46 crc kubenswrapper[4869]: E0130 12:05:46.628369 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="extract-utilities" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.628376 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="extract-utilities" Jan 30 12:05:46 crc kubenswrapper[4869]: E0130 12:05:46.628384 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="registry-server" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.628390 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="registry-server" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.628519 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="592d085a-ad90-48d1-a872-7b4b2adf300c" containerName="registry-server" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.631000 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.638470 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4rhwd"] Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.791751 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-catalog-content\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.791816 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-utilities\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.792138 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx2jj\" (UniqueName: \"kubernetes.io/projected/4a80f073-6f20-437c-aef3-bb49cd1565ff-kube-api-access-fx2jj\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.893349 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx2jj\" (UniqueName: \"kubernetes.io/projected/4a80f073-6f20-437c-aef3-bb49cd1565ff-kube-api-access-fx2jj\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.893402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-catalog-content\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.893441 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-utilities\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.894017 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-utilities\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.894582 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-catalog-content\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.916994 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fx2jj\" (UniqueName: \"kubernetes.io/projected/4a80f073-6f20-437c-aef3-bb49cd1565ff-kube-api-access-fx2jj\") pod \"redhat-operators-4rhwd\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") " pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:46 crc kubenswrapper[4869]: I0130 12:05:46.959029 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:05:47 crc kubenswrapper[4869]: I0130 12:05:47.373615 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4rhwd"] Jan 30 12:05:48 crc kubenswrapper[4869]: I0130 12:05:48.159164 4869 generic.go:334] "Generic (PLEG): container finished" podID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerID="548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21" exitCode=0 Jan 30 12:05:48 crc kubenswrapper[4869]: I0130 12:05:48.159202 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rhwd" event={"ID":"4a80f073-6f20-437c-aef3-bb49cd1565ff","Type":"ContainerDied","Data":"548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21"} Jan 30 12:05:48 crc kubenswrapper[4869]: I0130 12:05:48.159226 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rhwd" event={"ID":"4a80f073-6f20-437c-aef3-bb49cd1565ff","Type":"ContainerStarted","Data":"78e9e0f026888dc1a9ba33fc6f85d38cb5beff47d10edf9017f1ed0766b5066a"} Jan 30 12:05:49 crc kubenswrapper[4869]: I0130 12:05:49.167507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rhwd" event={"ID":"4a80f073-6f20-437c-aef3-bb49cd1565ff","Type":"ContainerStarted","Data":"2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e"} Jan 30 12:05:50 crc kubenswrapper[4869]: I0130 12:05:50.175770 4869 generic.go:334] "Generic (PLEG): container finished" podID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerID="2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e" exitCode=0 Jan 30 12:05:50 crc kubenswrapper[4869]: I0130 12:05:50.175936 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rhwd" event={"ID":"4a80f073-6f20-437c-aef3-bb49cd1565ff","Type":"ContainerDied","Data":"2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e"} Jan 30 12:05:51 crc kubenswrapper[4869]: I0130 12:05:51.184622 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rhwd" event={"ID":"4a80f073-6f20-437c-aef3-bb49cd1565ff","Type":"ContainerStarted","Data":"214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded"} Jan 30 12:05:51 crc kubenswrapper[4869]: I0130 12:05:51.202784 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4rhwd" podStartSLOduration=2.755705326 podStartE2EDuration="5.202764886s" podCreationTimestamp="2026-01-30 12:05:46 +0000 UTC" firstStartedPulling="2026-01-30 12:05:48.16095443 +0000 UTC m=+4298.710830496" lastFinishedPulling="2026-01-30 12:05:50.60801399 +0000 UTC m=+4301.157890056" observedRunningTime="2026-01-30 12:05:51.202302873 +0000 UTC m=+4301.752178939" watchObservedRunningTime="2026-01-30 12:05:51.202764886 +0000 UTC m=+4301.752640952" Jan 30 12:05:51 crc kubenswrapper[4869]: I0130 12:05:51.769323 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon 
Jan 30 12:05:51 crc kubenswrapper[4869]: I0130 12:05:51.769323 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 12:05:51 crc kubenswrapper[4869]: I0130 12:05:51.769654 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 12:05:56 crc kubenswrapper[4869]: I0130 12:05:56.959353 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4rhwd"
Jan 30 12:05:56 crc kubenswrapper[4869]: I0130 12:05:56.959701 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4rhwd"
Jan 30 12:05:57 crc kubenswrapper[4869]: I0130 12:05:57.000860 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4rhwd"
Jan 30 12:05:57 crc kubenswrapper[4869]: I0130 12:05:57.265079 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4rhwd"
Jan 30 12:05:57 crc kubenswrapper[4869]: I0130 12:05:57.306229 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4rhwd"]
Jan 30 12:05:59 crc kubenswrapper[4869]: I0130 12:05:59.239520 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4rhwd" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="registry-server" containerID="cri-o://214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded" gracePeriod=2
Jan 30 12:05:59 crc kubenswrapper[4869]: I0130 12:05:59.796180 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4rhwd"
Jan 30 12:05:59 crc kubenswrapper[4869]: I0130 12:05:59.980903 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-utilities\") pod \"4a80f073-6f20-437c-aef3-bb49cd1565ff\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") "
Jan 30 12:05:59 crc kubenswrapper[4869]: I0130 12:05:59.981300 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-catalog-content\") pod \"4a80f073-6f20-437c-aef3-bb49cd1565ff\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") "
Jan 30 12:05:59 crc kubenswrapper[4869]: I0130 12:05:59.981337 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx2jj\" (UniqueName: \"kubernetes.io/projected/4a80f073-6f20-437c-aef3-bb49cd1565ff-kube-api-access-fx2jj\") pod \"4a80f073-6f20-437c-aef3-bb49cd1565ff\" (UID: \"4a80f073-6f20-437c-aef3-bb49cd1565ff\") "
Jan 30 12:05:59 crc kubenswrapper[4869]: I0130 12:05:59.982153 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-utilities" (OuterVolumeSpecName: "utilities") pod "4a80f073-6f20-437c-aef3-bb49cd1565ff" (UID: "4a80f073-6f20-437c-aef3-bb49cd1565ff"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:05:59 crc kubenswrapper[4869]: I0130 12:05:59.990331 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a80f073-6f20-437c-aef3-bb49cd1565ff-kube-api-access-fx2jj" (OuterVolumeSpecName: "kube-api-access-fx2jj") pod "4a80f073-6f20-437c-aef3-bb49cd1565ff" (UID: "4a80f073-6f20-437c-aef3-bb49cd1565ff"). InnerVolumeSpecName "kube-api-access-fx2jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.084010 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx2jj\" (UniqueName: \"kubernetes.io/projected/4a80f073-6f20-437c-aef3-bb49cd1565ff-kube-api-access-fx2jj\") on node \"crc\" DevicePath \"\"" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.084056 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.114276 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a80f073-6f20-437c-aef3-bb49cd1565ff" (UID: "4a80f073-6f20-437c-aef3-bb49cd1565ff"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.190900 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a80f073-6f20-437c-aef3-bb49cd1565ff-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.248952 4869 generic.go:334] "Generic (PLEG): container finished" podID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerID="214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded" exitCode=0 Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.248999 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rhwd" event={"ID":"4a80f073-6f20-437c-aef3-bb49cd1565ff","Type":"ContainerDied","Data":"214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded"} Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.249026 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4rhwd" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.249046 4869 scope.go:117] "RemoveContainer" containerID="214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.249033 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rhwd" event={"ID":"4a80f073-6f20-437c-aef3-bb49cd1565ff","Type":"ContainerDied","Data":"78e9e0f026888dc1a9ba33fc6f85d38cb5beff47d10edf9017f1ed0766b5066a"} Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.272098 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4rhwd"] Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.277486 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4rhwd"] Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.278984 4869 scope.go:117] "RemoveContainer" containerID="2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.298187 4869 scope.go:117] "RemoveContainer" containerID="548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.325680 4869 scope.go:117] "RemoveContainer" containerID="214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded" Jan 30 12:06:00 crc kubenswrapper[4869]: E0130 12:06:00.326226 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded\": container with ID starting with 214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded not found: ID does not exist" containerID="214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.326282 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded"} err="failed to get container status \"214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded\": rpc error: code = NotFound desc = could not find container \"214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded\": container with ID starting with 214cdf67c7d255d44995f379955f6731cde03c52910419d75b72890c25bddded not found: ID does not exist" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.326310 4869 scope.go:117] "RemoveContainer" containerID="2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e" Jan 30 12:06:00 crc kubenswrapper[4869]: E0130 12:06:00.326697 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e\": container with ID starting with 2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e not found: ID does not exist" containerID="2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.326767 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e"} err="failed to get container status \"2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e\": rpc error: code = NotFound desc = could not find container 
\"2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e\": container with ID starting with 2a655c4382a4bdacb95c6a6dde56be5b23839e03e01768be6d2968dbf67c0e2e not found: ID does not exist" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.326803 4869 scope.go:117] "RemoveContainer" containerID="548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21" Jan 30 12:06:00 crc kubenswrapper[4869]: E0130 12:06:00.327222 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21\": container with ID starting with 548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21 not found: ID does not exist" containerID="548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21" Jan 30 12:06:00 crc kubenswrapper[4869]: I0130 12:06:00.327261 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21"} err="failed to get container status \"548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21\": rpc error: code = NotFound desc = could not find container \"548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21\": container with ID starting with 548bc1870111c3b4584b6b5ffada4df66a45c9dc1113c3a15ee1ee0972b9bf21 not found: ID does not exist" Jan 30 12:06:02 crc kubenswrapper[4869]: I0130 12:06:02.143434 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" path="/var/lib/kubelet/pods/4a80f073-6f20-437c-aef3-bb49cd1565ff/volumes" Jan 30 12:06:21 crc kubenswrapper[4869]: I0130 12:06:21.770060 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:06:21 crc kubenswrapper[4869]: I0130 12:06:21.770648 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:06:51 crc kubenswrapper[4869]: I0130 12:06:51.769600 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:06:51 crc kubenswrapper[4869]: I0130 12:06:51.770139 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:06:51 crc kubenswrapper[4869]: I0130 12:06:51.770184 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 12:06:51 crc kubenswrapper[4869]: I0130 12:06:51.770765 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"f3c0492f240ef8079c5a536a55c71f07e73380e7d54376f9d12ab4006ba8c385"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 12:06:51 crc kubenswrapper[4869]: I0130 12:06:51.770818 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://f3c0492f240ef8079c5a536a55c71f07e73380e7d54376f9d12ab4006ba8c385" gracePeriod=600 Jan 30 12:06:52 crc kubenswrapper[4869]: I0130 12:06:52.609995 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="f3c0492f240ef8079c5a536a55c71f07e73380e7d54376f9d12ab4006ba8c385" exitCode=0 Jan 30 12:06:52 crc kubenswrapper[4869]: I0130 12:06:52.610509 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"f3c0492f240ef8079c5a536a55c71f07e73380e7d54376f9d12ab4006ba8c385"} Jan 30 12:06:52 crc kubenswrapper[4869]: I0130 12:06:52.610540 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4"} Jan 30 12:06:52 crc kubenswrapper[4869]: I0130 12:06:52.610556 4869 scope.go:117] "RemoveContainer" containerID="18737fec76a7352bd8ea858d14d6a01cc7d36a5b2623d7cd4e9f2656af9932b4" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.570781 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-zcxm2"] Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.576491 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-zcxm2"] Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.712262 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-cv5p2"] Jan 30 12:06:53 crc kubenswrapper[4869]: E0130 12:06:53.712610 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="extract-content" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.712631 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="extract-content" Jan 30 12:06:53 crc kubenswrapper[4869]: E0130 12:06:53.712662 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="registry-server" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.712669 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="registry-server" Jan 30 12:06:53 crc kubenswrapper[4869]: E0130 12:06:53.712682 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="extract-utilities" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.712689 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="extract-utilities" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.712848 4869 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="4a80f073-6f20-437c-aef3-bb49cd1565ff" containerName="registry-server" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.713336 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.715110 4869 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-qxrkw" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.716445 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.716863 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.723185 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-cv5p2"] Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.724677 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.849334 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d13f6903-359b-4ad4-abfa-c01ed06366b0-node-mnt\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.849376 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d13f6903-359b-4ad4-abfa-c01ed06366b0-crc-storage\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.849397 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8prm\" (UniqueName: \"kubernetes.io/projected/d13f6903-359b-4ad4-abfa-c01ed06366b0-kube-api-access-j8prm\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.950383 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d13f6903-359b-4ad4-abfa-c01ed06366b0-node-mnt\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.950770 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d13f6903-359b-4ad4-abfa-c01ed06366b0-crc-storage\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.950825 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8prm\" (UniqueName: \"kubernetes.io/projected/d13f6903-359b-4ad4-abfa-c01ed06366b0-kube-api-access-j8prm\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.951489 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d13f6903-359b-4ad4-abfa-c01ed06366b0-node-mnt\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.952756 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d13f6903-359b-4ad4-abfa-c01ed06366b0-crc-storage\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:53 crc kubenswrapper[4869]: I0130 12:06:53.979889 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8prm\" (UniqueName: \"kubernetes.io/projected/d13f6903-359b-4ad4-abfa-c01ed06366b0-kube-api-access-j8prm\") pod \"crc-storage-crc-cv5p2\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:54 crc kubenswrapper[4869]: I0130 12:06:54.030691 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:54 crc kubenswrapper[4869]: I0130 12:06:54.144898 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f08cad2-3aae-4ab0-bea9-b9242a2de799" path="/var/lib/kubelet/pods/5f08cad2-3aae-4ab0-bea9-b9242a2de799/volumes" Jan 30 12:06:54 crc kubenswrapper[4869]: I0130 12:06:54.485761 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-cv5p2"] Jan 30 12:06:54 crc kubenswrapper[4869]: W0130 12:06:54.491293 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd13f6903_359b_4ad4_abfa_c01ed06366b0.slice/crio-f249c1941af1097d3789c6a0dbb8bc2095bbcebe04aabbca62c566485034efb9 WatchSource:0}: Error finding container f249c1941af1097d3789c6a0dbb8bc2095bbcebe04aabbca62c566485034efb9: Status 404 returned error can't find the container with id f249c1941af1097d3789c6a0dbb8bc2095bbcebe04aabbca62c566485034efb9 Jan 30 12:06:54 crc kubenswrapper[4869]: I0130 12:06:54.627240 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cv5p2" event={"ID":"d13f6903-359b-4ad4-abfa-c01ed06366b0","Type":"ContainerStarted","Data":"f249c1941af1097d3789c6a0dbb8bc2095bbcebe04aabbca62c566485034efb9"} Jan 30 12:06:55 crc kubenswrapper[4869]: I0130 12:06:55.636730 4869 generic.go:334] "Generic (PLEG): container finished" podID="d13f6903-359b-4ad4-abfa-c01ed06366b0" containerID="ec10f3d2f632f1bae5577994d31ce20d3ee74c3e304b83a2dda2a86f0c712516" exitCode=0 Jan 30 12:06:55 crc kubenswrapper[4869]: I0130 12:06:55.636771 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cv5p2" event={"ID":"d13f6903-359b-4ad4-abfa-c01ed06366b0","Type":"ContainerDied","Data":"ec10f3d2f632f1bae5577994d31ce20d3ee74c3e304b83a2dda2a86f0c712516"} Jan 30 12:06:56 crc kubenswrapper[4869]: I0130 12:06:56.941133 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.097427 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d13f6903-359b-4ad4-abfa-c01ed06366b0-node-mnt\") pod \"d13f6903-359b-4ad4-abfa-c01ed06366b0\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.097519 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8prm\" (UniqueName: \"kubernetes.io/projected/d13f6903-359b-4ad4-abfa-c01ed06366b0-kube-api-access-j8prm\") pod \"d13f6903-359b-4ad4-abfa-c01ed06366b0\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.097600 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d13f6903-359b-4ad4-abfa-c01ed06366b0-crc-storage\") pod \"d13f6903-359b-4ad4-abfa-c01ed06366b0\" (UID: \"d13f6903-359b-4ad4-abfa-c01ed06366b0\") " Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.097994 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d13f6903-359b-4ad4-abfa-c01ed06366b0-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d13f6903-359b-4ad4-abfa-c01ed06366b0" (UID: "d13f6903-359b-4ad4-abfa-c01ed06366b0"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.108031 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d13f6903-359b-4ad4-abfa-c01ed06366b0-kube-api-access-j8prm" (OuterVolumeSpecName: "kube-api-access-j8prm") pod "d13f6903-359b-4ad4-abfa-c01ed06366b0" (UID: "d13f6903-359b-4ad4-abfa-c01ed06366b0"). InnerVolumeSpecName "kube-api-access-j8prm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.116124 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d13f6903-359b-4ad4-abfa-c01ed06366b0-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d13f6903-359b-4ad4-abfa-c01ed06366b0" (UID: "d13f6903-359b-4ad4-abfa-c01ed06366b0"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.199152 4869 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d13f6903-359b-4ad4-abfa-c01ed06366b0-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.199196 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8prm\" (UniqueName: \"kubernetes.io/projected/d13f6903-359b-4ad4-abfa-c01ed06366b0-kube-api-access-j8prm\") on node \"crc\" DevicePath \"\"" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.199210 4869 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d13f6903-359b-4ad4-abfa-c01ed06366b0-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.656183 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cv5p2" event={"ID":"d13f6903-359b-4ad4-abfa-c01ed06366b0","Type":"ContainerDied","Data":"f249c1941af1097d3789c6a0dbb8bc2095bbcebe04aabbca62c566485034efb9"} Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.656230 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cv5p2" Jan 30 12:06:57 crc kubenswrapper[4869]: I0130 12:06:57.656227 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f249c1941af1097d3789c6a0dbb8bc2095bbcebe04aabbca62c566485034efb9" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.256920 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-cv5p2"] Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.261667 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-cv5p2"] Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.393430 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-j66rz"] Jan 30 12:06:59 crc kubenswrapper[4869]: E0130 12:06:59.393934 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d13f6903-359b-4ad4-abfa-c01ed06366b0" containerName="storage" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.393958 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d13f6903-359b-4ad4-abfa-c01ed06366b0" containerName="storage" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.394128 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d13f6903-359b-4ad4-abfa-c01ed06366b0" containerName="storage" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.394802 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.397014 4869 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-qxrkw" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.397431 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.397444 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.397620 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.408444 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-j66rz"] Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.532774 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrfqq\" (UniqueName: \"kubernetes.io/projected/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-kube-api-access-lrfqq\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.533198 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-crc-storage\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.533239 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-node-mnt\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.634720 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-crc-storage\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.634799 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-node-mnt\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.634872 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrfqq\" (UniqueName: \"kubernetes.io/projected/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-kube-api-access-lrfqq\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.635151 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-node-mnt\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " 
pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.635659 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-crc-storage\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.651425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrfqq\" (UniqueName: \"kubernetes.io/projected/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-kube-api-access-lrfqq\") pod \"crc-storage-crc-j66rz\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:06:59 crc kubenswrapper[4869]: I0130 12:06:59.714514 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:07:00 crc kubenswrapper[4869]: I0130 12:07:00.148864 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d13f6903-359b-4ad4-abfa-c01ed06366b0" path="/var/lib/kubelet/pods/d13f6903-359b-4ad4-abfa-c01ed06366b0/volumes" Jan 30 12:07:00 crc kubenswrapper[4869]: I0130 12:07:00.156896 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-j66rz"] Jan 30 12:07:00 crc kubenswrapper[4869]: I0130 12:07:00.675444 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-j66rz" event={"ID":"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b","Type":"ContainerStarted","Data":"21baed559578bc74769c3338946bfb8a1bb4bc45392a451feb6eb83b275bddce"} Jan 30 12:07:01 crc kubenswrapper[4869]: I0130 12:07:01.688035 4869 generic.go:334] "Generic (PLEG): container finished" podID="c1be86c4-f4ff-41f6-bea2-d62b64e0f97b" containerID="9925227717fe2c7b087eb60464c2086be8c6441939ffc962ec5c9c45913c4523" exitCode=0 Jan 30 12:07:01 crc kubenswrapper[4869]: I0130 12:07:01.688145 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-j66rz" event={"ID":"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b","Type":"ContainerDied","Data":"9925227717fe2c7b087eb60464c2086be8c6441939ffc962ec5c9c45913c4523"} Jan 30 12:07:02 crc kubenswrapper[4869]: I0130 12:07:02.947288 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.089318 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-node-mnt\") pod \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.089387 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-crc-storage\") pod \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.089428 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrfqq\" (UniqueName: \"kubernetes.io/projected/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-kube-api-access-lrfqq\") pod \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\" (UID: \"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b\") " Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.089473 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "c1be86c4-f4ff-41f6-bea2-d62b64e0f97b" (UID: "c1be86c4-f4ff-41f6-bea2-d62b64e0f97b"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.089785 4869 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.094221 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-kube-api-access-lrfqq" (OuterVolumeSpecName: "kube-api-access-lrfqq") pod "c1be86c4-f4ff-41f6-bea2-d62b64e0f97b" (UID: "c1be86c4-f4ff-41f6-bea2-d62b64e0f97b"). InnerVolumeSpecName "kube-api-access-lrfqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.107194 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "c1be86c4-f4ff-41f6-bea2-d62b64e0f97b" (UID: "c1be86c4-f4ff-41f6-bea2-d62b64e0f97b"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.191412 4869 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.191441 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrfqq\" (UniqueName: \"kubernetes.io/projected/c1be86c4-f4ff-41f6-bea2-d62b64e0f97b-kube-api-access-lrfqq\") on node \"crc\" DevicePath \"\"" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.704145 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-j66rz" event={"ID":"c1be86c4-f4ff-41f6-bea2-d62b64e0f97b","Type":"ContainerDied","Data":"21baed559578bc74769c3338946bfb8a1bb4bc45392a451feb6eb83b275bddce"} Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.704187 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21baed559578bc74769c3338946bfb8a1bb4bc45392a451feb6eb83b275bddce" Jan 30 12:07:03 crc kubenswrapper[4869]: I0130 12:07:03.704208 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-j66rz" Jan 30 12:07:15 crc kubenswrapper[4869]: I0130 12:07:15.621903 4869 scope.go:117] "RemoveContainer" containerID="1ebd791dc091581a1f56fd9769496c84e9cd7d7c4cde3e9e92c4620a2d7c04b4" Jan 30 12:09:21 crc kubenswrapper[4869]: I0130 12:09:21.769622 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:09:21 crc kubenswrapper[4869]: I0130 12:09:21.770148 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:09:51 crc kubenswrapper[4869]: I0130 12:09:51.769357 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:09:51 crc kubenswrapper[4869]: I0130 12:09:51.770077 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.683392 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wj22b"] Jan 30 12:09:54 crc kubenswrapper[4869]: E0130 12:09:54.683924 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1be86c4-f4ff-41f6-bea2-d62b64e0f97b" containerName="storage" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.683936 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1be86c4-f4ff-41f6-bea2-d62b64e0f97b" containerName="storage" Jan 30 12:09:54 crc 
kubenswrapper[4869]: I0130 12:09:54.684093 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1be86c4-f4ff-41f6-bea2-d62b64e0f97b" containerName="storage" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.685126 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.705293 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wj22b"] Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.716618 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-utilities\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.716738 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lxtc\" (UniqueName: \"kubernetes.io/projected/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-kube-api-access-6lxtc\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.716790 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-catalog-content\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.818039 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lxtc\" (UniqueName: \"kubernetes.io/projected/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-kube-api-access-6lxtc\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.818140 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-catalog-content\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.818217 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-utilities\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.818753 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-catalog-content\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.818836 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-utilities\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:54 crc kubenswrapper[4869]: I0130 12:09:54.840683 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lxtc\" (UniqueName: \"kubernetes.io/projected/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-kube-api-access-6lxtc\") pod \"redhat-marketplace-wj22b\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") " pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:55 crc kubenswrapper[4869]: I0130 12:09:55.014031 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:09:55 crc kubenswrapper[4869]: I0130 12:09:55.441104 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wj22b"] Jan 30 12:09:55 crc kubenswrapper[4869]: E0130 12:09:55.758986 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8c82ad8_9c51_495b_9d1f_18cae9fd70ef.slice/crio-conmon-50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982.scope\": RecentStats: unable to find data in memory cache]" Jan 30 12:09:55 crc kubenswrapper[4869]: I0130 12:09:55.873671 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerID="50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982" exitCode=0 Jan 30 12:09:55 crc kubenswrapper[4869]: I0130 12:09:55.873749 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wj22b" event={"ID":"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef","Type":"ContainerDied","Data":"50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982"} Jan 30 12:09:55 crc kubenswrapper[4869]: I0130 12:09:55.873805 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wj22b" event={"ID":"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef","Type":"ContainerStarted","Data":"dd1952dc62e591a1ff9902575b2e36f14ad14ba0918a676d2f517c2a2f2a68ca"} Jan 30 12:09:55 crc kubenswrapper[4869]: I0130 12:09:55.876261 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 12:09:56 crc kubenswrapper[4869]: I0130 12:09:56.884084 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerID="b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4" exitCode=0 Jan 30 12:09:56 crc kubenswrapper[4869]: I0130 12:09:56.884358 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wj22b" event={"ID":"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef","Type":"ContainerDied","Data":"b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4"} Jan 30 12:09:57 crc kubenswrapper[4869]: I0130 12:09:57.893168 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wj22b" event={"ID":"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef","Type":"ContainerStarted","Data":"2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30"} Jan 30 12:09:57 crc kubenswrapper[4869]: I0130 12:09:57.913278 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wj22b" 
Jan 30 12:09:57 crc kubenswrapper[4869]: I0130 12:09:57.913278 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wj22b" podStartSLOduration=2.523290432 podStartE2EDuration="3.913257272s" podCreationTimestamp="2026-01-30 12:09:54 +0000 UTC" firstStartedPulling="2026-01-30 12:09:55.876042269 +0000 UTC m=+4546.425918325" lastFinishedPulling="2026-01-30 12:09:57.266009099 +0000 UTC m=+4547.815885165" observedRunningTime="2026-01-30 12:09:57.911166982 +0000 UTC m=+4548.461043058" watchObservedRunningTime="2026-01-30 12:09:57.913257272 +0000 UTC m=+4548.463133338"
Jan 30 12:10:05 crc kubenswrapper[4869]: I0130 12:10:05.014808 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wj22b"
Jan 30 12:10:05 crc kubenswrapper[4869]: I0130 12:10:05.015101 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wj22b"
Jan 30 12:10:05 crc kubenswrapper[4869]: I0130 12:10:05.059299 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wj22b"
Jan 30 12:10:05 crc kubenswrapper[4869]: I0130 12:10:05.983876 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wj22b"
Jan 30 12:10:06 crc kubenswrapper[4869]: I0130 12:10:06.029474 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wj22b"]
Jan 30 12:10:07 crc kubenswrapper[4869]: I0130 12:10:07.956010 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wj22b" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="registry-server" containerID="cri-o://2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30" gracePeriod=2
Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.450308 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wj22b"
Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.621039 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-utilities\") pod \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") "
Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.621168 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lxtc\" (UniqueName: \"kubernetes.io/projected/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-kube-api-access-6lxtc\") pod \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") "
Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.621296 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-catalog-content\") pod \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\" (UID: \"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef\") "
Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.622194 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-utilities" (OuterVolumeSpecName: "utilities") pod "b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" (UID: "b8c82ad8-9c51-495b-9d1f-18cae9fd70ef"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.629370 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-kube-api-access-6lxtc" (OuterVolumeSpecName: "kube-api-access-6lxtc") pod "b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" (UID: "b8c82ad8-9c51-495b-9d1f-18cae9fd70ef"). InnerVolumeSpecName "kube-api-access-6lxtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.647021 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" (UID: "b8c82ad8-9c51-495b-9d1f-18cae9fd70ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.723221 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lxtc\" (UniqueName: \"kubernetes.io/projected/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-kube-api-access-6lxtc\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.723255 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.723264 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.966692 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerID="2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30" exitCode=0 Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.966749 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wj22b" event={"ID":"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef","Type":"ContainerDied","Data":"2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30"} Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.966776 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wj22b" event={"ID":"b8c82ad8-9c51-495b-9d1f-18cae9fd70ef","Type":"ContainerDied","Data":"dd1952dc62e591a1ff9902575b2e36f14ad14ba0918a676d2f517c2a2f2a68ca"} Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.966792 4869 scope.go:117] "RemoveContainer" containerID="2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.966934 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wj22b" Jan 30 12:10:08 crc kubenswrapper[4869]: I0130 12:10:08.991360 4869 scope.go:117] "RemoveContainer" containerID="b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4" Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.001725 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wj22b"] Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.011987 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wj22b"] Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.017581 4869 scope.go:117] "RemoveContainer" containerID="50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982" Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.042325 4869 scope.go:117] "RemoveContainer" containerID="2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30" Jan 30 12:10:09 crc kubenswrapper[4869]: E0130 12:10:09.042883 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30\": container with ID starting with 2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30 not found: ID does not exist" containerID="2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30" Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.042935 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30"} err="failed to get container status \"2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30\": rpc error: code = NotFound desc = could not find container \"2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30\": container with ID starting with 2a33b908d5df17132e51ea60b49ee4904199aa9a430118619231ca148f564a30 not found: ID does not exist" Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.042963 4869 scope.go:117] "RemoveContainer" containerID="b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4" Jan 30 12:10:09 crc kubenswrapper[4869]: E0130 12:10:09.043340 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4\": container with ID starting with b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4 not found: ID does not exist" containerID="b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4" Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.043377 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4"} err="failed to get container status \"b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4\": rpc error: code = NotFound desc = could not find container \"b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4\": container with ID starting with b010589ab6ca6a61afa579ebd507e6f67eb0fddaa91e8adf96b7125a7d5010a4 not found: ID does not exist" Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.043396 4869 scope.go:117] "RemoveContainer" containerID="50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982" Jan 30 12:10:09 crc kubenswrapper[4869]: E0130 12:10:09.043644 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982\": container with ID starting with 50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982 not found: ID does not exist" containerID="50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982" Jan 30 12:10:09 crc kubenswrapper[4869]: I0130 12:10:09.043670 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982"} err="failed to get container status \"50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982\": rpc error: code = NotFound desc = could not find container \"50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982\": container with ID starting with 50add266365cfc655d16a95c2987f079f0acf3eaf489a70c8e994a1fa055a982 not found: ID does not exist" Jan 30 12:10:10 crc kubenswrapper[4869]: I0130 12:10:10.145051 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" path="/var/lib/kubelet/pods/b8c82ad8-9c51-495b-9d1f-18cae9fd70ef/volumes" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.183082 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-swmwq"] Jan 30 12:10:19 crc kubenswrapper[4869]: E0130 12:10:19.184082 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="registry-server" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.184106 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="registry-server" Jan 30 12:10:19 crc kubenswrapper[4869]: E0130 12:10:19.184139 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="extract-utilities" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.184148 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="extract-utilities" Jan 30 12:10:19 crc kubenswrapper[4869]: E0130 12:10:19.184169 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="extract-content" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.184177 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="extract-content" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.184347 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8c82ad8-9c51-495b-9d1f-18cae9fd70ef" containerName="registry-server" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.185154 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.187496 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.187516 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.187899 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.191463 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-vbtzp" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.191676 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.199456 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-swmwq"] Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.378375 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qvfn\" (UniqueName: \"kubernetes.io/projected/43459845-1242-482b-a27c-739c09e86524-kube-api-access-9qvfn\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.378489 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-config\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.378547 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.483566 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qvfn\" (UniqueName: \"kubernetes.io/projected/43459845-1242-482b-a27c-739c09e86524-kube-api-access-9qvfn\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.483919 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-config\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.484054 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.485185 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.485272 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-config\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.591581 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-2rtf2"] Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.647945 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-2rtf2"] Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.649635 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.669378 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qvfn\" (UniqueName: \"kubernetes.io/projected/43459845-1242-482b-a27c-739c09e86524-kube-api-access-9qvfn\") pod \"dnsmasq-dns-5d7b5456f5-swmwq\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.687881 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.687950 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-config\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.688108 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdz78\" (UniqueName: \"kubernetes.io/projected/4619f34b-3016-4243-a2f0-c916b5514512-kube-api-access-qdz78\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.789186 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.789526 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-config\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.789574 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-qdz78\" (UniqueName: \"kubernetes.io/projected/4619f34b-3016-4243-a2f0-c916b5514512-kube-api-access-qdz78\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.790574 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-config\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.790582 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.807808 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:19 crc kubenswrapper[4869]: I0130 12:10:19.810473 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdz78\" (UniqueName: \"kubernetes.io/projected/4619f34b-3016-4243-a2f0-c916b5514512-kube-api-access-qdz78\") pod \"dnsmasq-dns-98ddfc8f-2rtf2\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.091134 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.266294 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-swmwq"] Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.361993 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.364521 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.367603 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.367656 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.367680 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6kgfd" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.367825 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.368055 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.382905 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503292 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503350 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503396 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a01a674e-c971-4c67-8418-5ebd661f84dc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503417 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503443 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503494 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503534 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a01a674e-c971-4c67-8418-5ebd661f84dc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503566 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.503584 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn8mz\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-kube-api-access-dn8mz\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.545624 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-2rtf2"] Jan 30 12:10:20 crc kubenswrapper[4869]: W0130 12:10:20.551920 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4619f34b_3016_4243_a2f0_c916b5514512.slice/crio-adea9865b14f8651d2a22fef13cc0dc6348bdc4e906d03cb714897c20b9043d2 WatchSource:0}: Error finding container adea9865b14f8651d2a22fef13cc0dc6348bdc4e906d03cb714897c20b9043d2: Status 404 returned error can't find the container with id adea9865b14f8651d2a22fef13cc0dc6348bdc4e906d03cb714897c20b9043d2 Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.604768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.604817 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.604870 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a01a674e-c971-4c67-8418-5ebd661f84dc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.604911 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.604952 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-server-conf\") pod \"rabbitmq-server-0\" (UID: 
\"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.605013 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.605063 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a01a674e-c971-4c67-8418-5ebd661f84dc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.605105 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.605133 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn8mz\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-kube-api-access-dn8mz\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.606127 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.606504 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.606773 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.606812 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.610399 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a01a674e-c971-4c67-8418-5ebd661f84dc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.611352 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" 
(UniqueName: \"kubernetes.io/secret/a01a674e-c971-4c67-8418-5ebd661f84dc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.611682 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.614877 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.614923 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/21ec76f3ec42e2ecd6f85a7b4821af1c0eb541c56f5b6cd974b29ba9b4d436b8/globalmount\"" pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.625300 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn8mz\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-kube-api-access-dn8mz\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.645848 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") " pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.730676 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.731983 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.733518 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.736018 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.736057 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.736204 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.736292 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.736494 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-hjdtv" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.762760 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.911672 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ab16cd8a-d018-4192-be1c-27c1f22f359c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.911800 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.911837 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.911879 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.911918 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.911961 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc 
kubenswrapper[4869]: I0130 12:10:20.911989 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzs6k\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-kube-api-access-wzs6k\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.912034 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:20 crc kubenswrapper[4869]: I0130 12:10:20.912057 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ab16cd8a-d018-4192-be1c-27c1f22f359c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013616 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013703 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013773 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013797 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzs6k\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-kube-api-access-wzs6k\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013854 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ab16cd8a-d018-4192-be1c-27c1f22f359c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013939 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ab16cd8a-d018-4192-be1c-27c1f22f359c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.013978 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.014012 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.014869 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.018106 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.018971 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.019585 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.019611 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/572454f230dc02e3d14e57d1b1caae9d3d3194b06bb04c0d46fe1bb6ecd2a96b/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.019679 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.049136 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" event={"ID":"4619f34b-3016-4243-a2f0-c916b5514512","Type":"ContainerStarted","Data":"adea9865b14f8651d2a22fef13cc0dc6348bdc4e906d03cb714897c20b9043d2"} Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.054164 4869 generic.go:334] "Generic (PLEG): container finished" podID="43459845-1242-482b-a27c-739c09e86524" containerID="39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5" exitCode=0 Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.054212 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" event={"ID":"43459845-1242-482b-a27c-739c09e86524","Type":"ContainerDied","Data":"39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5"} Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.054238 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" event={"ID":"43459845-1242-482b-a27c-739c09e86524","Type":"ContainerStarted","Data":"0150ef1dbc811b7cd4559efcfdfc2b0e55d6cf2def5c3d64484cfd12dca9d92c"} Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.260809 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.260823 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ab16cd8a-d018-4192-be1c-27c1f22f359c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.262072 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ab16cd8a-d018-4192-be1c-27c1f22f359c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.262325 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzs6k\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-kube-api-access-wzs6k\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.279566 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.293294 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.437682 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.769963 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.770532 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.770601 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.771586 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.771670 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" gracePeriod=600 Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.870462 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.963691 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.966523 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 30 12:10:21 crc kubenswrapper[4869]: E0130 12:10:21.966572 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.969439 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.970881 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-p5dl5" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.971458 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.972086 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.982809 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 30 12:10:21 crc kubenswrapper[4869]: I0130 12:10:21.982936 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.064029 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ab16cd8a-d018-4192-be1c-27c1f22f359c","Type":"ContainerStarted","Data":"6c657111a02e2138699bbbda77031a620b1f83304a502858a3ebe443eb04ed5b"} Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.071139 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" exitCode=0 Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.071189 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4"} Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.071229 4869 scope.go:117] "RemoveContainer" containerID="f3c0492f240ef8079c5a536a55c71f07e73380e7d54376f9d12ab4006ba8c385" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.071680 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:10:22 crc kubenswrapper[4869]: E0130 12:10:22.071944 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.074810 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"a01a674e-c971-4c67-8418-5ebd661f84dc","Type":"ContainerStarted","Data":"de8716ac1ff59e4204a61139d3f2cc7d2b75d6b569e7bb66d9ae511ee61a5197"} Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.081226 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" event={"ID":"43459845-1242-482b-a27c-739c09e86524","Type":"ContainerStarted","Data":"5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488"} Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.081682 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.124116 4869 generic.go:334] "Generic (PLEG): container finished" podID="4619f34b-3016-4243-a2f0-c916b5514512" containerID="271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa" exitCode=0 Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.124198 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" event={"ID":"4619f34b-3016-4243-a2f0-c916b5514512","Type":"ContainerDied","Data":"271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa"} Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.136377 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q775\" (UniqueName: \"kubernetes.io/projected/2d6f92ae-3351-43f9-ae75-ab887fdf402e-kube-api-access-4q775\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.136571 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" podStartSLOduration=3.136555123 podStartE2EDuration="3.136555123s" podCreationTimestamp="2026-01-30 12:10:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:10:22.113863298 +0000 UTC m=+4572.663739374" watchObservedRunningTime="2026-01-30 12:10:22.136555123 +0000 UTC m=+4572.686431189" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.137130 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-48cb8072-bef7-4727-a052-79994a717612\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-48cb8072-bef7-4727-a052-79994a717612\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.137317 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2d6f92ae-3351-43f9-ae75-ab887fdf402e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.137437 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-config-data-default\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.137606 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.137786 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f92ae-3351-43f9-ae75-ab887fdf402e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.137939 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f92ae-3351-43f9-ae75-ab887fdf402e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.138146 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-kolla-config\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.240235 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-kolla-config\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.240557 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q775\" (UniqueName: \"kubernetes.io/projected/2d6f92ae-3351-43f9-ae75-ab887fdf402e-kube-api-access-4q775\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.240835 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-48cb8072-bef7-4727-a052-79994a717612\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-48cb8072-bef7-4727-a052-79994a717612\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.240994 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2d6f92ae-3351-43f9-ae75-ab887fdf402e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.241079 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-config-data-default\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.241175 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.241261 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f92ae-3351-43f9-ae75-ab887fdf402e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.241402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f92ae-3351-43f9-ae75-ab887fdf402e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.242193 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-config-data-default\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.241256 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-kolla-config\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.242696 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2d6f92ae-3351-43f9-ae75-ab887fdf402e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.243602 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d6f92ae-3351-43f9-ae75-ab887fdf402e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.244197 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.244237 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-48cb8072-bef7-4727-a052-79994a717612\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-48cb8072-bef7-4727-a052-79994a717612\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d1587fd4c0c14de489b500e5cef47b20649852b5d1eae01abdb66c8a33714ecb/globalmount\"" pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.260767 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f92ae-3351-43f9-ae75-ab887fdf402e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.262474 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q775\" (UniqueName: \"kubernetes.io/projected/2d6f92ae-3351-43f9-ae75-ab887fdf402e-kube-api-access-4q775\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.263477 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f92ae-3351-43f9-ae75-ab887fdf402e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.290089 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-48cb8072-bef7-4727-a052-79994a717612\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-48cb8072-bef7-4727-a052-79994a717612\") pod \"openstack-galera-0\" (UID: \"2d6f92ae-3351-43f9-ae75-ab887fdf402e\") " pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.359639 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.390741 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.391691 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.399206 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-r48ms" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.399414 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.413336 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.445054 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrpkn\" (UniqueName: \"kubernetes.io/projected/6248329a-abb5-42cd-b358-9fa425bfb39b-kube-api-access-zrpkn\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.445617 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6248329a-abb5-42cd-b358-9fa425bfb39b-config-data\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.445682 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6248329a-abb5-42cd-b358-9fa425bfb39b-kolla-config\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.547665 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrpkn\" (UniqueName: \"kubernetes.io/projected/6248329a-abb5-42cd-b358-9fa425bfb39b-kube-api-access-zrpkn\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.547788 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6248329a-abb5-42cd-b358-9fa425bfb39b-config-data\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.547843 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6248329a-abb5-42cd-b358-9fa425bfb39b-kolla-config\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.548737 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6248329a-abb5-42cd-b358-9fa425bfb39b-kolla-config\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.549339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6248329a-abb5-42cd-b358-9fa425bfb39b-config-data\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.570099 4869 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-zrpkn\" (UniqueName: \"kubernetes.io/projected/6248329a-abb5-42cd-b358-9fa425bfb39b-kube-api-access-zrpkn\") pod \"memcached-0\" (UID: \"6248329a-abb5-42cd-b358-9fa425bfb39b\") " pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.715579 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 30 12:10:22 crc kubenswrapper[4869]: I0130 12:10:22.902625 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.173352 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a01a674e-c971-4c67-8418-5ebd661f84dc","Type":"ContainerStarted","Data":"00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7"} Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.177676 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2d6f92ae-3351-43f9-ae75-ab887fdf402e","Type":"ContainerStarted","Data":"c5bed8aa408d936459e399279cd80271246631cf79353de0d8d1e03541de9e24"} Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.180356 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" event={"ID":"4619f34b-3016-4243-a2f0-c916b5514512","Type":"ContainerStarted","Data":"aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19"} Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.180549 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.201281 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.226639 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" podStartSLOduration=4.226620026 podStartE2EDuration="4.226620026s" podCreationTimestamp="2026-01-30 12:10:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:10:23.222474928 +0000 UTC m=+4573.772350994" watchObservedRunningTime="2026-01-30 12:10:23.226620026 +0000 UTC m=+4573.776496092" Jan 30 12:10:23 crc kubenswrapper[4869]: W0130 12:10:23.264002 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6248329a_abb5_42cd_b358_9fa425bfb39b.slice/crio-4546882a482bc16a939957f29b29e5ca46ded5ea96fa93c678addb783d56d997 WatchSource:0}: Error finding container 4546882a482bc16a939957f29b29e5ca46ded5ea96fa93c678addb783d56d997: Status 404 returned error can't find the container with id 4546882a482bc16a939957f29b29e5ca46ded5ea96fa93c678addb783d56d997 Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.616757 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.618491 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.620580 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-5b2vc" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.620884 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.621086 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.621446 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.630439 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.672339 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3db80712-bc4e-4418-873f-de37b9970b48-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.672438 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cf24af04-009b-4000-abbe-7bac6fd35711\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf24af04-009b-4000-abbe-7bac6fd35711\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.672485 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db80712-bc4e-4418-873f-de37b9970b48-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.672537 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3db80712-bc4e-4418-873f-de37b9970b48-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.672600 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.672829 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.672897 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.673086 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22vxq\" (UniqueName: \"kubernetes.io/projected/3db80712-bc4e-4418-873f-de37b9970b48-kube-api-access-22vxq\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.774542 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.774624 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.774748 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22vxq\" (UniqueName: \"kubernetes.io/projected/3db80712-bc4e-4418-873f-de37b9970b48-kube-api-access-22vxq\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.774795 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3db80712-bc4e-4418-873f-de37b9970b48-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.774850 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cf24af04-009b-4000-abbe-7bac6fd35711\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf24af04-009b-4000-abbe-7bac6fd35711\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.774908 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db80712-bc4e-4418-873f-de37b9970b48-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.774946 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3db80712-bc4e-4418-873f-de37b9970b48-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.775010 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.775898 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3db80712-bc4e-4418-873f-de37b9970b48-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.776012 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.776017 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.778351 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3db80712-bc4e-4418-873f-de37b9970b48-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.781177 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3db80712-bc4e-4418-873f-de37b9970b48-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.782445 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
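
The reconciler_common.go and operation_generator.go entries above all come from one mechanism: kubelet's volume manager reconciler diffs the desired state of world (every volume the pods scheduled to this node require) against the actual state of world (what is already attached and mounted), and issues one operation per missing volume. A rough sketch of that loop, with hypothetical types standing in for kubelet's:

// Rough sketch (hypothetical types, not kubelet source) of one reconciler pass.
package main

import "fmt"

type volumeName string

// reconcile mounts anything desired but absent from the actual state, logging
// "started" and then "SetUp succeeded" in the same shape as the entries above.
func reconcile(desired []volumeName, actual map[volumeName]bool) {
	for _, v := range desired {
		if actual[v] {
			continue // already mounted, nothing to reconcile
		}
		fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
		// ...plugin SetUp runs here (configmap/secret/empty-dir/projected/csi)...
		actual[v] = true
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v)
	}
}

func main() {
	desired := []volumeName{"kolla-config", "config-data-default", "galera-tls-certs"}
	reconcile(desired, map[volumeName]bool{"kolla-config": true})
}
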
Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.782489 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cf24af04-009b-4000-abbe-7bac6fd35711\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf24af04-009b-4000-abbe-7bac6fd35711\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cec64c5dd031298157151b4e56d44d0c0dbf761eabb0347a0de5dab1d1490873/globalmount\"" pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.782932 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db80712-bc4e-4418-873f-de37b9970b48-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.798780 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22vxq\" (UniqueName: \"kubernetes.io/projected/3db80712-bc4e-4418-873f-de37b9970b48-kube-api-access-22vxq\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.817480 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cf24af04-009b-4000-abbe-7bac6fd35711\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf24af04-009b-4000-abbe-7bac6fd35711\") pod \"openstack-cell1-galera-0\" (UID: \"3db80712-bc4e-4418-873f-de37b9970b48\") " pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:23 crc kubenswrapper[4869]: I0130 12:10:23.933326 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:24 crc kubenswrapper[4869]: I0130 12:10:24.191898 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ab16cd8a-d018-4192-be1c-27c1f22f359c","Type":"ContainerStarted","Data":"1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05"} Jan 30 12:10:24 crc kubenswrapper[4869]: I0130 12:10:24.195096 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2d6f92ae-3351-43f9-ae75-ab887fdf402e","Type":"ContainerStarted","Data":"4d5bc85261ee31e45d316f43b0870c93cb2a2cffe76d9d150f4864ff94286c40"} Jan 30 12:10:24 crc kubenswrapper[4869]: I0130 12:10:24.198907 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"6248329a-abb5-42cd-b358-9fa425bfb39b","Type":"ContainerStarted","Data":"908a0d5b0334f54986efa533aa1692eae9a995a74fed9437860f0c86d969311f"} Jan 30 12:10:24 crc kubenswrapper[4869]: I0130 12:10:24.198968 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"6248329a-abb5-42cd-b358-9fa425bfb39b","Type":"ContainerStarted","Data":"4546882a482bc16a939957f29b29e5ca46ded5ea96fa93c678addb783d56d997"} Jan 30 12:10:24 crc kubenswrapper[4869]: I0130 12:10:24.199031 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 30 12:10:24 crc kubenswrapper[4869]: I0130 12:10:24.283721 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.283663878 podStartE2EDuration="2.283663878s" podCreationTimestamp="2026-01-30 12:10:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:10:24.236455066 +0000 UTC m=+4574.786331142" watchObservedRunningTime="2026-01-30 12:10:24.283663878 +0000 UTC m=+4574.833539944" Jan 30 12:10:24 crc kubenswrapper[4869]: I0130 12:10:24.436322 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 30 12:10:24 crc kubenswrapper[4869]: W0130 12:10:24.449782 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3db80712_bc4e_4418_873f_de37b9970b48.slice/crio-0423b28c16c64a43fa3a06c6bb160cef037ad16dfc5ac623cc2cb0c80bc1c817 WatchSource:0}: Error finding container 0423b28c16c64a43fa3a06c6bb160cef037ad16dfc5ac623cc2cb0c80bc1c817: Status 404 returned error can't find the container with id 0423b28c16c64a43fa3a06c6bb160cef037ad16dfc5ac623cc2cb0c80bc1c817 Jan 30 12:10:25 crc kubenswrapper[4869]: I0130 12:10:25.206781 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3db80712-bc4e-4418-873f-de37b9970b48","Type":"ContainerStarted","Data":"e61e8b938057d609c64b6a1ff1f64eccbb14fe8945df1d5afb2a9204f9bffe5a"} Jan 30 12:10:25 crc kubenswrapper[4869]: I0130 12:10:25.207039 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3db80712-bc4e-4418-873f-de37b9970b48","Type":"ContainerStarted","Data":"0423b28c16c64a43fa3a06c6bb160cef037ad16dfc5ac623cc2cb0c80bc1c817"} Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.635365 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-27rq7"] Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.637383 4869 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.649367 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-27rq7"] Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.735973 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-catalog-content\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.736054 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vzk7\" (UniqueName: \"kubernetes.io/projected/b05fe87a-1657-4b08-a67a-17ef9d93abe4-kube-api-access-6vzk7\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.736539 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-utilities\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.838227 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vzk7\" (UniqueName: \"kubernetes.io/projected/b05fe87a-1657-4b08-a67a-17ef9d93abe4-kube-api-access-6vzk7\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.838324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-utilities\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.838384 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-catalog-content\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.838925 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-utilities\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.838954 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-catalog-content\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 
12:10:26.857852 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vzk7\" (UniqueName: \"kubernetes.io/projected/b05fe87a-1657-4b08-a67a-17ef9d93abe4-kube-api-access-6vzk7\") pod \"community-operators-27rq7\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:26 crc kubenswrapper[4869]: I0130 12:10:26.966641 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:27 crc kubenswrapper[4869]: I0130 12:10:27.231724 4869 generic.go:334] "Generic (PLEG): container finished" podID="2d6f92ae-3351-43f9-ae75-ab887fdf402e" containerID="4d5bc85261ee31e45d316f43b0870c93cb2a2cffe76d9d150f4864ff94286c40" exitCode=0 Jan 30 12:10:27 crc kubenswrapper[4869]: I0130 12:10:27.231990 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2d6f92ae-3351-43f9-ae75-ab887fdf402e","Type":"ContainerDied","Data":"4d5bc85261ee31e45d316f43b0870c93cb2a2cffe76d9d150f4864ff94286c40"} Jan 30 12:10:27 crc kubenswrapper[4869]: I0130 12:10:27.499638 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-27rq7"] Jan 30 12:10:27 crc kubenswrapper[4869]: W0130 12:10:27.502941 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb05fe87a_1657_4b08_a67a_17ef9d93abe4.slice/crio-32d957a4090f1febaec6011a2c681a585340deb4d16e5512e64620f5a42195bd WatchSource:0}: Error finding container 32d957a4090f1febaec6011a2c681a585340deb4d16e5512e64620f5a42195bd: Status 404 returned error can't find the container with id 32d957a4090f1febaec6011a2c681a585340deb4d16e5512e64620f5a42195bd Jan 30 12:10:28 crc kubenswrapper[4869]: I0130 12:10:28.241110 4869 generic.go:334] "Generic (PLEG): container finished" podID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerID="75d26612a787390a679c7d8a0167e93bedca18e91134256943795cda4837636c" exitCode=0 Jan 30 12:10:28 crc kubenswrapper[4869]: I0130 12:10:28.241207 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27rq7" event={"ID":"b05fe87a-1657-4b08-a67a-17ef9d93abe4","Type":"ContainerDied","Data":"75d26612a787390a679c7d8a0167e93bedca18e91134256943795cda4837636c"} Jan 30 12:10:28 crc kubenswrapper[4869]: I0130 12:10:28.241566 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27rq7" event={"ID":"b05fe87a-1657-4b08-a67a-17ef9d93abe4","Type":"ContainerStarted","Data":"32d957a4090f1febaec6011a2c681a585340deb4d16e5512e64620f5a42195bd"} Jan 30 12:10:28 crc kubenswrapper[4869]: I0130 12:10:28.243922 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2d6f92ae-3351-43f9-ae75-ab887fdf402e","Type":"ContainerStarted","Data":"d306a13bd365cab3907f63851ecc6f855f99a4cdf64de84ad250aa0aa93f6591"} Jan 30 12:10:28 crc kubenswrapper[4869]: I0130 12:10:28.245958 4869 generic.go:334] "Generic (PLEG): container finished" podID="3db80712-bc4e-4418-873f-de37b9970b48" containerID="e61e8b938057d609c64b6a1ff1f64eccbb14fe8945df1d5afb2a9204f9bffe5a" exitCode=0 Jan 30 12:10:28 crc kubenswrapper[4869]: I0130 12:10:28.246014 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"3db80712-bc4e-4418-873f-de37b9970b48","Type":"ContainerDied","Data":"e61e8b938057d609c64b6a1ff1f64eccbb14fe8945df1d5afb2a9204f9bffe5a"} Jan 30 12:10:28 crc kubenswrapper[4869]: I0130 12:10:28.305370 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.305020433 podStartE2EDuration="8.305020433s" podCreationTimestamp="2026-01-30 12:10:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:10:28.300453543 +0000 UTC m=+4578.850329619" watchObservedRunningTime="2026-01-30 12:10:28.305020433 +0000 UTC m=+4578.854896499" Jan 30 12:10:29 crc kubenswrapper[4869]: I0130 12:10:29.256799 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3db80712-bc4e-4418-873f-de37b9970b48","Type":"ContainerStarted","Data":"9b751af6fa896b7433a06f9d1bef008e9dd8e2e0c13fd31d8bfce661eaaf75b8"} Jan 30 12:10:29 crc kubenswrapper[4869]: I0130 12:10:29.281234 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.281214098 podStartE2EDuration="7.281214098s" podCreationTimestamp="2026-01-30 12:10:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:10:29.276213256 +0000 UTC m=+4579.826089322" watchObservedRunningTime="2026-01-30 12:10:29.281214098 +0000 UTC m=+4579.831090164" Jan 30 12:10:29 crc kubenswrapper[4869]: I0130 12:10:29.809588 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:30 crc kubenswrapper[4869]: I0130 12:10:30.093517 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:10:30 crc kubenswrapper[4869]: I0130 12:10:30.153246 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-swmwq"] Jan 30 12:10:30 crc kubenswrapper[4869]: I0130 12:10:30.264575 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" podUID="43459845-1242-482b-a27c-739c09e86524" containerName="dnsmasq-dns" containerID="cri-o://5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488" gracePeriod=10 Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.255015 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.280085 4869 generic.go:334] "Generic (PLEG): container finished" podID="43459845-1242-482b-a27c-739c09e86524" containerID="5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488" exitCode=0 Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.280148 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.280137 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" event={"ID":"43459845-1242-482b-a27c-739c09e86524","Type":"ContainerDied","Data":"5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488"} Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.280230 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-swmwq" event={"ID":"43459845-1242-482b-a27c-739c09e86524","Type":"ContainerDied","Data":"0150ef1dbc811b7cd4559efcfdfc2b0e55d6cf2def5c3d64484cfd12dca9d92c"} Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.280248 4869 scope.go:117] "RemoveContainer" containerID="5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.290993 4869 generic.go:334] "Generic (PLEG): container finished" podID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerID="fa5944377a81e15675695ce7fb2059b133e967570873c666b0123242a4c49436" exitCode=0 Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.291046 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27rq7" event={"ID":"b05fe87a-1657-4b08-a67a-17ef9d93abe4","Type":"ContainerDied","Data":"fa5944377a81e15675695ce7fb2059b133e967570873c666b0123242a4c49436"} Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.317258 4869 scope.go:117] "RemoveContainer" containerID="39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.430042 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-config\") pod \"43459845-1242-482b-a27c-739c09e86524\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.430266 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-dns-svc\") pod \"43459845-1242-482b-a27c-739c09e86524\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.430372 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qvfn\" (UniqueName: \"kubernetes.io/projected/43459845-1242-482b-a27c-739c09e86524-kube-api-access-9qvfn\") pod \"43459845-1242-482b-a27c-739c09e86524\" (UID: \"43459845-1242-482b-a27c-739c09e86524\") " Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.660887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43459845-1242-482b-a27c-739c09e86524-kube-api-access-9qvfn" (OuterVolumeSpecName: "kube-api-access-9qvfn") pod "43459845-1242-482b-a27c-739c09e86524" (UID: "43459845-1242-482b-a27c-739c09e86524"). InnerVolumeSpecName "kube-api-access-9qvfn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.668727 4869 scope.go:117] "RemoveContainer" containerID="5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488" Jan 30 12:10:31 crc kubenswrapper[4869]: E0130 12:10:31.669662 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488\": container with ID starting with 5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488 not found: ID does not exist" containerID="5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.669739 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488"} err="failed to get container status \"5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488\": rpc error: code = NotFound desc = could not find container \"5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488\": container with ID starting with 5ff1af4917eb2515ebfa827775a9843165b779e86e290241c63049b2ff327488 not found: ID does not exist" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.669780 4869 scope.go:117] "RemoveContainer" containerID="39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5" Jan 30 12:10:31 crc kubenswrapper[4869]: E0130 12:10:31.670096 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5\": container with ID starting with 39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5 not found: ID does not exist" containerID="39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.670129 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5"} err="failed to get container status \"39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5\": rpc error: code = NotFound desc = could not find container \"39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5\": container with ID starting with 39ccf31f4b60ec819f8f11468f1c0eb8a1ab5f19438fc43f82ec99ece4f8a3b5 not found: ID does not exist" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.689358 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-config" (OuterVolumeSpecName: "config") pod "43459845-1242-482b-a27c-739c09e86524" (UID: "43459845-1242-482b-a27c-739c09e86524"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.696260 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "43459845-1242-482b-a27c-739c09e86524" (UID: "43459845-1242-482b-a27c-739c09e86524"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.734366 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.734417 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qvfn\" (UniqueName: \"kubernetes.io/projected/43459845-1242-482b-a27c-739c09e86524-kube-api-access-9qvfn\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.734433 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43459845-1242-482b-a27c-739c09e86524-config\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.912361 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-swmwq"] Jan 30 12:10:31 crc kubenswrapper[4869]: I0130 12:10:31.919184 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-swmwq"] Jan 30 12:10:32 crc kubenswrapper[4869]: I0130 12:10:32.142112 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43459845-1242-482b-a27c-739c09e86524" path="/var/lib/kubelet/pods/43459845-1242-482b-a27c-739c09e86524/volumes" Jan 30 12:10:32 crc kubenswrapper[4869]: I0130 12:10:32.303738 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27rq7" event={"ID":"b05fe87a-1657-4b08-a67a-17ef9d93abe4","Type":"ContainerStarted","Data":"edc861dcf8d01e9726688b72e3d9528a43b71723c3f2374f6b82c7c699cd3c75"} Jan 30 12:10:32 crc kubenswrapper[4869]: I0130 12:10:32.326879 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-27rq7" podStartSLOduration=2.557163172 podStartE2EDuration="6.326855201s" podCreationTimestamp="2026-01-30 12:10:26 +0000 UTC" firstStartedPulling="2026-01-30 12:10:28.243157474 +0000 UTC m=+4578.793033540" lastFinishedPulling="2026-01-30 12:10:32.012849503 +0000 UTC m=+4582.562725569" observedRunningTime="2026-01-30 12:10:32.326080699 +0000 UTC m=+4582.875956785" watchObservedRunningTime="2026-01-30 12:10:32.326855201 +0000 UTC m=+4582.876731267" Jan 30 12:10:32 crc kubenswrapper[4869]: I0130 12:10:32.360111 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 30 12:10:32 crc kubenswrapper[4869]: I0130 12:10:32.360178 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 30 12:10:32 crc kubenswrapper[4869]: I0130 12:10:32.717770 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 30 12:10:33 crc kubenswrapper[4869]: I0130 12:10:33.934028 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:33 crc kubenswrapper[4869]: I0130 12:10:33.934071 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:34 crc kubenswrapper[4869]: I0130 12:10:34.651311 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 30 12:10:34 crc kubenswrapper[4869]: I0130 12:10:34.717172 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/openstack-galera-0" Jan 30 12:10:36 crc kubenswrapper[4869]: I0130 12:10:36.215956 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:36 crc kubenswrapper[4869]: I0130 12:10:36.285023 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 30 12:10:36 crc kubenswrapper[4869]: I0130 12:10:36.967567 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:36 crc kubenswrapper[4869]: I0130 12:10:36.967616 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:37 crc kubenswrapper[4869]: I0130 12:10:37.012631 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:37 crc kubenswrapper[4869]: I0130 12:10:37.133210 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:10:37 crc kubenswrapper[4869]: E0130 12:10:37.133508 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:10:37 crc kubenswrapper[4869]: I0130 12:10:37.391797 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:37 crc kubenswrapper[4869]: I0130 12:10:37.439648 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-27rq7"] Jan 30 12:10:39 crc kubenswrapper[4869]: I0130 12:10:39.360815 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-27rq7" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="registry-server" containerID="cri-o://edc861dcf8d01e9726688b72e3d9528a43b71723c3f2374f6b82c7c699cd3c75" gracePeriod=2 Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.371120 4869 generic.go:334] "Generic (PLEG): container finished" podID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerID="edc861dcf8d01e9726688b72e3d9528a43b71723c3f2374f6b82c7c699cd3c75" exitCode=0 Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.371189 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27rq7" event={"ID":"b05fe87a-1657-4b08-a67a-17ef9d93abe4","Type":"ContainerDied","Data":"edc861dcf8d01e9726688b72e3d9528a43b71723c3f2374f6b82c7c699cd3c75"} Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.451639 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-27rq7" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.487137 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vzk7\" (UniqueName: \"kubernetes.io/projected/b05fe87a-1657-4b08-a67a-17ef9d93abe4-kube-api-access-6vzk7\") pod \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.487237 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-catalog-content\") pod \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.487289 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-utilities\") pod \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\" (UID: \"b05fe87a-1657-4b08-a67a-17ef9d93abe4\") " Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.488675 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-utilities" (OuterVolumeSpecName: "utilities") pod "b05fe87a-1657-4b08-a67a-17ef9d93abe4" (UID: "b05fe87a-1657-4b08-a67a-17ef9d93abe4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.496033 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b05fe87a-1657-4b08-a67a-17ef9d93abe4-kube-api-access-6vzk7" (OuterVolumeSpecName: "kube-api-access-6vzk7") pod "b05fe87a-1657-4b08-a67a-17ef9d93abe4" (UID: "b05fe87a-1657-4b08-a67a-17ef9d93abe4"). InnerVolumeSpecName "kube-api-access-6vzk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.558203 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b05fe87a-1657-4b08-a67a-17ef9d93abe4" (UID: "b05fe87a-1657-4b08-a67a-17ef9d93abe4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.588819 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.588862 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05fe87a-1657-4b08-a67a-17ef9d93abe4-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.588875 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vzk7\" (UniqueName: \"kubernetes.io/projected/b05fe87a-1657-4b08-a67a-17ef9d93abe4-kube-api-access-6vzk7\") on node \"crc\" DevicePath \"\"" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.920278 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-8j4gj"] Jan 30 12:10:40 crc kubenswrapper[4869]: E0130 12:10:40.920740 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43459845-1242-482b-a27c-739c09e86524" containerName="dnsmasq-dns" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.920763 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="43459845-1242-482b-a27c-739c09e86524" containerName="dnsmasq-dns" Jan 30 12:10:40 crc kubenswrapper[4869]: E0130 12:10:40.920784 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43459845-1242-482b-a27c-739c09e86524" containerName="init" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.920790 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="43459845-1242-482b-a27c-739c09e86524" containerName="init" Jan 30 12:10:40 crc kubenswrapper[4869]: E0130 12:10:40.920802 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="extract-utilities" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.920809 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="extract-utilities" Jan 30 12:10:40 crc kubenswrapper[4869]: E0130 12:10:40.920822 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="extract-content" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.920828 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="extract-content" Jan 30 12:10:40 crc kubenswrapper[4869]: E0130 12:10:40.920847 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="registry-server" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.920852 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="registry-server" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.921030 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" containerName="registry-server" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.921057 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="43459845-1242-482b-a27c-739c09e86524" containerName="dnsmasq-dns" Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.921616 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.929520 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret"
Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.931584 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-8j4gj"]
Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.995383 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjbwm\" (UniqueName: \"kubernetes.io/projected/0fa55406-fa70-445b-b64f-d4bbfb652f6a-kube-api-access-vjbwm\") pod \"root-account-create-update-8j4gj\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") " pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:40 crc kubenswrapper[4869]: I0130 12:10:40.995453 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa55406-fa70-445b-b64f-d4bbfb652f6a-operator-scripts\") pod \"root-account-create-update-8j4gj\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") " pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.097007 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjbwm\" (UniqueName: \"kubernetes.io/projected/0fa55406-fa70-445b-b64f-d4bbfb652f6a-kube-api-access-vjbwm\") pod \"root-account-create-update-8j4gj\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") " pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.097078 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa55406-fa70-445b-b64f-d4bbfb652f6a-operator-scripts\") pod \"root-account-create-update-8j4gj\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") " pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.097935 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa55406-fa70-445b-b64f-d4bbfb652f6a-operator-scripts\") pod \"root-account-create-update-8j4gj\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") " pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.115790 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjbwm\" (UniqueName: \"kubernetes.io/projected/0fa55406-fa70-445b-b64f-d4bbfb652f6a-kube-api-access-vjbwm\") pod \"root-account-create-update-8j4gj\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") " pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.238474 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.382810 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27rq7" event={"ID":"b05fe87a-1657-4b08-a67a-17ef9d93abe4","Type":"ContainerDied","Data":"32d957a4090f1febaec6011a2c681a585340deb4d16e5512e64620f5a42195bd"}
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.383082 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-27rq7"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.383160 4869 scope.go:117] "RemoveContainer" containerID="edc861dcf8d01e9726688b72e3d9528a43b71723c3f2374f6b82c7c699cd3c75"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.404277 4869 scope.go:117] "RemoveContainer" containerID="fa5944377a81e15675695ce7fb2059b133e967570873c666b0123242a4c49436"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.420647 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-27rq7"]
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.433786 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-27rq7"]
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.434690 4869 scope.go:117] "RemoveContainer" containerID="75d26612a787390a679c7d8a0167e93bedca18e91134256943795cda4837636c"
Jan 30 12:10:41 crc kubenswrapper[4869]: I0130 12:10:41.659329 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-8j4gj"]
Jan 30 12:10:41 crc kubenswrapper[4869]: W0130 12:10:41.772218 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa55406_fa70_445b_b64f_d4bbfb652f6a.slice/crio-a05d51331a7adc46c1e4499569a7ee9134af63de9f0917e45ce1bf73a1738798 WatchSource:0}: Error finding container a05d51331a7adc46c1e4499569a7ee9134af63de9f0917e45ce1bf73a1738798: Status 404 returned error can't find the container with id a05d51331a7adc46c1e4499569a7ee9134af63de9f0917e45ce1bf73a1738798
Jan 30 12:10:42 crc kubenswrapper[4869]: I0130 12:10:42.144780 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b05fe87a-1657-4b08-a67a-17ef9d93abe4" path="/var/lib/kubelet/pods/b05fe87a-1657-4b08-a67a-17ef9d93abe4/volumes"
Jan 30 12:10:42 crc kubenswrapper[4869]: I0130 12:10:42.392006 4869 generic.go:334] "Generic (PLEG): container finished" podID="0fa55406-fa70-445b-b64f-d4bbfb652f6a" containerID="68b551b931c11eb40df709c47115a0d7e0a186cdbab37c847b08e3b95bd7178d" exitCode=0
Jan 30 12:10:42 crc kubenswrapper[4869]: I0130 12:10:42.392085 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-8j4gj" event={"ID":"0fa55406-fa70-445b-b64f-d4bbfb652f6a","Type":"ContainerDied","Data":"68b551b931c11eb40df709c47115a0d7e0a186cdbab37c847b08e3b95bd7178d"}
Jan 30 12:10:42 crc kubenswrapper[4869]: I0130 12:10:42.392122 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-8j4gj" event={"ID":"0fa55406-fa70-445b-b64f-d4bbfb652f6a","Type":"ContainerStarted","Data":"a05d51331a7adc46c1e4499569a7ee9134af63de9f0917e45ce1bf73a1738798"}
Jan 30 12:10:43 crc kubenswrapper[4869]: I0130 12:10:43.691875 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:43 crc kubenswrapper[4869]: I0130 12:10:43.740643 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa55406-fa70-445b-b64f-d4bbfb652f6a-operator-scripts\") pod \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") "
Jan 30 12:10:43 crc kubenswrapper[4869]: I0130 12:10:43.740774 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjbwm\" (UniqueName: \"kubernetes.io/projected/0fa55406-fa70-445b-b64f-d4bbfb652f6a-kube-api-access-vjbwm\") pod \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\" (UID: \"0fa55406-fa70-445b-b64f-d4bbfb652f6a\") "
Jan 30 12:10:43 crc kubenswrapper[4869]: I0130 12:10:43.742486 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fa55406-fa70-445b-b64f-d4bbfb652f6a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0fa55406-fa70-445b-b64f-d4bbfb652f6a" (UID: "0fa55406-fa70-445b-b64f-d4bbfb652f6a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 12:10:43 crc kubenswrapper[4869]: I0130 12:10:43.749696 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fa55406-fa70-445b-b64f-d4bbfb652f6a-kube-api-access-vjbwm" (OuterVolumeSpecName: "kube-api-access-vjbwm") pod "0fa55406-fa70-445b-b64f-d4bbfb652f6a" (UID: "0fa55406-fa70-445b-b64f-d4bbfb652f6a"). InnerVolumeSpecName "kube-api-access-vjbwm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 12:10:43 crc kubenswrapper[4869]: I0130 12:10:43.842770 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjbwm\" (UniqueName: \"kubernetes.io/projected/0fa55406-fa70-445b-b64f-d4bbfb652f6a-kube-api-access-vjbwm\") on node \"crc\" DevicePath \"\""
Jan 30 12:10:43 crc kubenswrapper[4869]: I0130 12:10:43.842808 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa55406-fa70-445b-b64f-d4bbfb652f6a-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 12:10:44 crc kubenswrapper[4869]: I0130 12:10:44.408536 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-8j4gj" event={"ID":"0fa55406-fa70-445b-b64f-d4bbfb652f6a","Type":"ContainerDied","Data":"a05d51331a7adc46c1e4499569a7ee9134af63de9f0917e45ce1bf73a1738798"}
Jan 30 12:10:44 crc kubenswrapper[4869]: I0130 12:10:44.408817 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a05d51331a7adc46c1e4499569a7ee9134af63de9f0917e45ce1bf73a1738798"
Jan 30 12:10:44 crc kubenswrapper[4869]: I0130 12:10:44.408597 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-8j4gj"
Jan 30 12:10:47 crc kubenswrapper[4869]: I0130 12:10:47.569481 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-8j4gj"]
Jan 30 12:10:47 crc kubenswrapper[4869]: I0130 12:10:47.575135 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-8j4gj"]
Jan 30 12:10:48 crc kubenswrapper[4869]: I0130 12:10:48.141484 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fa55406-fa70-445b-b64f-d4bbfb652f6a" path="/var/lib/kubelet/pods/0fa55406-fa70-445b-b64f-d4bbfb652f6a/volumes"
Jan 30 12:10:50 crc kubenswrapper[4869]: I0130 12:10:50.138378 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4"
Jan 30 12:10:50 crc kubenswrapper[4869]: E0130 12:10:50.139031 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.583553 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-md2pp"]
Jan 30 12:10:52 crc kubenswrapper[4869]: E0130 12:10:52.584238 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa55406-fa70-445b-b64f-d4bbfb652f6a" containerName="mariadb-account-create-update"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.584255 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa55406-fa70-445b-b64f-d4bbfb652f6a" containerName="mariadb-account-create-update"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.584436 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa55406-fa70-445b-b64f-d4bbfb652f6a" containerName="mariadb-account-create-update"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.585434 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.589788 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.594895 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-md2pp"]
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.775988 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-operator-scripts\") pod \"root-account-create-update-md2pp\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") " pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.776136 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m89mh\" (UniqueName: \"kubernetes.io/projected/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-kube-api-access-m89mh\") pod \"root-account-create-update-md2pp\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") " pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.877647 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m89mh\" (UniqueName: \"kubernetes.io/projected/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-kube-api-access-m89mh\") pod \"root-account-create-update-md2pp\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") " pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.877826 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-operator-scripts\") pod \"root-account-create-update-md2pp\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") " pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.880657 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-operator-scripts\") pod \"root-account-create-update-md2pp\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") " pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.900424 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m89mh\" (UniqueName: \"kubernetes.io/projected/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-kube-api-access-m89mh\") pod \"root-account-create-update-md2pp\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") " pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:52 crc kubenswrapper[4869]: I0130 12:10:52.902610 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:53 crc kubenswrapper[4869]: I0130 12:10:53.348313 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-md2pp"]
Jan 30 12:10:53 crc kubenswrapper[4869]: I0130 12:10:53.468725 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-md2pp" event={"ID":"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae","Type":"ContainerStarted","Data":"c4ab15956cb2480ff4fd00f6a9f779f6b7261933cc0125fa3ee6c762812ff790"}
Jan 30 12:10:54 crc kubenswrapper[4869]: I0130 12:10:54.477653 4869 generic.go:334] "Generic (PLEG): container finished" podID="0a7b3b5c-5d53-4333-83c6-5dceec6a4eae" containerID="835745b4c080ac9d80bcd47a88e6b218d6a4b3abf85884a69d72fe0aefdd01be" exitCode=0
Jan 30 12:10:54 crc kubenswrapper[4869]: I0130 12:10:54.477742 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-md2pp" event={"ID":"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae","Type":"ContainerDied","Data":"835745b4c080ac9d80bcd47a88e6b218d6a4b3abf85884a69d72fe0aefdd01be"}
Jan 30 12:10:55 crc kubenswrapper[4869]: I0130 12:10:55.486321 4869 generic.go:334] "Generic (PLEG): container finished" podID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerID="00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7" exitCode=0
Jan 30 12:10:55 crc kubenswrapper[4869]: I0130 12:10:55.486426 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a01a674e-c971-4c67-8418-5ebd661f84dc","Type":"ContainerDied","Data":"00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7"}
Jan 30 12:10:55 crc kubenswrapper[4869]: I0130 12:10:55.831762 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:55 crc kubenswrapper[4869]: I0130 12:10:55.927702 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m89mh\" (UniqueName: \"kubernetes.io/projected/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-kube-api-access-m89mh\") pod \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") "
Jan 30 12:10:55 crc kubenswrapper[4869]: I0130 12:10:55.928151 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-operator-scripts\") pod \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\" (UID: \"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae\") "
Jan 30 12:10:55 crc kubenswrapper[4869]: I0130 12:10:55.929192 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0a7b3b5c-5d53-4333-83c6-5dceec6a4eae" (UID: "0a7b3b5c-5d53-4333-83c6-5dceec6a4eae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 12:10:55 crc kubenswrapper[4869]: I0130 12:10:55.932522 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-kube-api-access-m89mh" (OuterVolumeSpecName: "kube-api-access-m89mh") pod "0a7b3b5c-5d53-4333-83c6-5dceec6a4eae" (UID: "0a7b3b5c-5d53-4333-83c6-5dceec6a4eae"). InnerVolumeSpecName "kube-api-access-m89mh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.030044 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m89mh\" (UniqueName: \"kubernetes.io/projected/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-kube-api-access-m89mh\") on node \"crc\" DevicePath \"\""
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.030093 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.494759 4869 generic.go:334] "Generic (PLEG): container finished" podID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerID="1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05" exitCode=0
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.494877 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ab16cd8a-d018-4192-be1c-27c1f22f359c","Type":"ContainerDied","Data":"1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05"}
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.497431 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a01a674e-c971-4c67-8418-5ebd661f84dc","Type":"ContainerStarted","Data":"714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02"}
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.497667 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.499854 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-md2pp" event={"ID":"0a7b3b5c-5d53-4333-83c6-5dceec6a4eae","Type":"ContainerDied","Data":"c4ab15956cb2480ff4fd00f6a9f779f6b7261933cc0125fa3ee6c762812ff790"}
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.499879 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4ab15956cb2480ff4fd00f6a9f779f6b7261933cc0125fa3ee6c762812ff790"
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.499923 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-md2pp"
Jan 30 12:10:56 crc kubenswrapper[4869]: I0130 12:10:56.540589 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.540566327 podStartE2EDuration="37.540566327s" podCreationTimestamp="2026-01-30 12:10:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:10:56.532611841 +0000 UTC m=+4607.082487907" watchObservedRunningTime="2026-01-30 12:10:56.540566327 +0000 UTC m=+4607.090442393"
Jan 30 12:10:58 crc kubenswrapper[4869]: I0130 12:10:58.522136 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ab16cd8a-d018-4192-be1c-27c1f22f359c","Type":"ContainerStarted","Data":"31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52"}
Jan 30 12:10:58 crc kubenswrapper[4869]: I0130 12:10:58.523860 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 12:10:58 crc kubenswrapper[4869]: I0130 12:10:58.558916 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.558891892 podStartE2EDuration="39.558891892s" podCreationTimestamp="2026-01-30 12:10:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:10:58.557372629 +0000 UTC m=+4609.107248715" watchObservedRunningTime="2026-01-30 12:10:58.558891892 +0000 UTC m=+4609.108767958"
Jan 30 12:11:01 crc kubenswrapper[4869]: I0130 12:11:01.133699 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4"
Jan 30 12:11:01 crc kubenswrapper[4869]: E0130 12:11:01.134363 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:11:10 crc kubenswrapper[4869]: I0130 12:11:10.733909 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Jan 30 12:11:11 crc kubenswrapper[4869]: I0130 12:11:11.441011 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 12:11:14 crc kubenswrapper[4869]: I0130 12:11:14.133618 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4"
Jan 30 12:11:14 crc kubenswrapper[4869]: E0130 12:11:14.134130 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.346253 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-jzdnb"]
Jan 30 12:11:16 crc kubenswrapper[4869]: E0130 12:11:16.348374 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a7b3b5c-5d53-4333-83c6-5dceec6a4eae" containerName="mariadb-account-create-update"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.348403 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a7b3b5c-5d53-4333-83c6-5dceec6a4eae" containerName="mariadb-account-create-update"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.348648 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a7b3b5c-5d53-4333-83c6-5dceec6a4eae" containerName="mariadb-account-create-update"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.349555 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.361559 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-jzdnb"]
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.447952 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-config\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.448035 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clzlt\" (UniqueName: \"kubernetes.io/projected/140c446e-0248-45f5-9bc8-d09918522fdb-kube-api-access-clzlt\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.448079 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.549934 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-config\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.550012 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clzlt\" (UniqueName: \"kubernetes.io/projected/140c446e-0248-45f5-9bc8-d09918522fdb-kube-api-access-clzlt\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.550046 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.551615 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-config\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.551787 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.575931 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clzlt\" (UniqueName: \"kubernetes.io/projected/140c446e-0248-45f5-9bc8-d09918522fdb-kube-api-access-clzlt\") pod \"dnsmasq-dns-5b7946d7b9-jzdnb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:16 crc kubenswrapper[4869]: I0130 12:11:16.668565 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:17 crc kubenswrapper[4869]: I0130 12:11:17.208818 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 30 12:11:17 crc kubenswrapper[4869]: I0130 12:11:17.278287 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-jzdnb"]
Jan 30 12:11:17 crc kubenswrapper[4869]: I0130 12:11:17.657286 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" event={"ID":"140c446e-0248-45f5-9bc8-d09918522fdb","Type":"ContainerStarted","Data":"da0ce924857998b2923138c0820c57c3ff26942f3de242d37ab51c00d782ab4e"}
Jan 30 12:11:17 crc kubenswrapper[4869]: I0130 12:11:17.843481 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 12:11:18 crc kubenswrapper[4869]: I0130 12:11:18.667064 4869 generic.go:334] "Generic (PLEG): container finished" podID="140c446e-0248-45f5-9bc8-d09918522fdb" containerID="6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd" exitCode=0
Jan 30 12:11:18 crc kubenswrapper[4869]: I0130 12:11:18.667127 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" event={"ID":"140c446e-0248-45f5-9bc8-d09918522fdb","Type":"ContainerDied","Data":"6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd"}
Jan 30 12:11:19 crc kubenswrapper[4869]: I0130 12:11:19.675587 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" event={"ID":"140c446e-0248-45f5-9bc8-d09918522fdb","Type":"ContainerStarted","Data":"091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84"}
Jan 30 12:11:19 crc kubenswrapper[4869]: I0130 12:11:19.675991 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:19 crc kubenswrapper[4869]: I0130 12:11:19.692337 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" podStartSLOduration=3.692314813 podStartE2EDuration="3.692314813s" podCreationTimestamp="2026-01-30 12:11:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:11:19.689789312 +0000 UTC m=+4630.239665378" watchObservedRunningTime="2026-01-30 12:11:19.692314813 +0000 UTC m=+4630.242190879"
Jan 30 12:11:19 crc kubenswrapper[4869]: I0130 12:11:19.857397 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerName="rabbitmq" containerID="cri-o://31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52" gracePeriod=604798
Jan 30 12:11:19 crc kubenswrapper[4869]: I0130 12:11:19.857436 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerName="rabbitmq" containerID="cri-o://714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02" gracePeriod=604798
Jan 30 12:11:20 crc kubenswrapper[4869]: I0130 12:11:20.731939 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.238:5672: connect: connection refused"
Jan 30 12:11:21 crc kubenswrapper[4869]: I0130 12:11:21.438655 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.239:5672: connect: connection refused"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.427677 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.494374 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.594072 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a01a674e-c971-4c67-8418-5ebd661f84dc-erlang-cookie-secret\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.594640 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-confd\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.594983 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.595027 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzs6k\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-kube-api-access-wzs6k\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.595091 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-erlang-cookie\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.595847 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.595882 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.595194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-erlang-cookie\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596049 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-server-conf\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596135 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-plugins\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596162 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-server-conf\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596269 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596300 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a01a674e-c971-4c67-8418-5ebd661f84dc-pod-info\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596324 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ab16cd8a-d018-4192-be1c-27c1f22f359c-pod-info\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596348 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-plugins-conf\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596379 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ab16cd8a-d018-4192-be1c-27c1f22f359c-erlang-cookie-secret\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596412 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-plugins\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596436 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn8mz\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-kube-api-access-dn8mz\") pod \"a01a674e-c971-4c67-8418-5ebd661f84dc\" (UID: \"a01a674e-c971-4c67-8418-5ebd661f84dc\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596461 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-confd\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596477 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-plugins-conf\") pod \"ab16cd8a-d018-4192-be1c-27c1f22f359c\" (UID: \"ab16cd8a-d018-4192-be1c-27c1f22f359c\") "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.596994 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.597009 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.600351 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.600539 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.600934 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.601126 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.606103 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-kube-api-access-wzs6k" (OuterVolumeSpecName: "kube-api-access-wzs6k") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "kube-api-access-wzs6k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.606213 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a01a674e-c971-4c67-8418-5ebd661f84dc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.609578 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-kube-api-access-dn8mz" (OuterVolumeSpecName: "kube-api-access-dn8mz") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "kube-api-access-dn8mz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.610683 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab16cd8a-d018-4192-be1c-27c1f22f359c-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.615951 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/a01a674e-c971-4c67-8418-5ebd661f84dc-pod-info" (OuterVolumeSpecName: "pod-info") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.649072 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ab16cd8a-d018-4192-be1c-27c1f22f359c-pod-info" (OuterVolumeSpecName: "pod-info") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.649296 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-server-conf" (OuterVolumeSpecName: "server-conf") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.649535 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9" (OuterVolumeSpecName: "persistence") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "pvc-2e055ccd-c768-4ae2-b309-495a63280ec9". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.649545 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0" (OuterVolumeSpecName: "persistence") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "pvc-e0963118-064b-4753-9d52-6c6862da45a0". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.670387 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-server-conf" (OuterVolumeSpecName: "server-conf") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.687432 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.699924 4869 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ab16cd8a-d018-4192-be1c-27c1f22f359c-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700097 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700333 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn8mz\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-kube-api-access-dn8mz\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700367 4869 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-plugins-conf\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700377 4869 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a01a674e-c971-4c67-8418-5ebd661f84dc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700410 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") on node \"crc\" "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700427 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzs6k\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-kube-api-access-wzs6k\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700439 4869 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-server-conf\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700450 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700463 4869 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ab16cd8a-d018-4192-be1c-27c1f22f359c-server-conf\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700480 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") on node \"crc\" "
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700491 4869 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a01a674e-c971-4c67-8418-5ebd661f84dc-pod-info\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700517 4869 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ab16cd8a-d018-4192-be1c-27c1f22f359c-pod-info\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.700535 4869 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a01a674e-c971-4c67-8418-5ebd661f84dc-plugins-conf\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.716071 4869 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.716367 4869 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-2e055ccd-c768-4ae2-b309-495a63280ec9" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9") on node "crc"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.730622 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "a01a674e-c971-4c67-8418-5ebd661f84dc" (UID: "a01a674e-c971-4c67-8418-5ebd661f84dc"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.730855 4869 generic.go:334] "Generic (PLEG): container finished" podID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerID="714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02" exitCode=0
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.731021 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.732405 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a01a674e-c971-4c67-8418-5ebd661f84dc","Type":"ContainerDied","Data":"714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02"}
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.732464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a01a674e-c971-4c67-8418-5ebd661f84dc","Type":"ContainerDied","Data":"de8716ac1ff59e4204a61139d3f2cc7d2b75d6b569e7bb66d9ae511ee61a5197"}
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.732485 4869 scope.go:117] "RemoveContainer" containerID="714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.733888 4869 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.735208 4869 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e0963118-064b-4753-9d52-6c6862da45a0" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0") on node "crc"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.736447 4869 generic.go:334] "Generic (PLEG): container finished" podID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerID="31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52" exitCode=0
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.736511 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.736528 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ab16cd8a-d018-4192-be1c-27c1f22f359c","Type":"ContainerDied","Data":"31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52"}
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.736565 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ab16cd8a-d018-4192-be1c-27c1f22f359c","Type":"ContainerDied","Data":"6c657111a02e2138699bbbda77031a620b1f83304a502858a3ebe443eb04ed5b"}
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.757085 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ab16cd8a-d018-4192-be1c-27c1f22f359c" (UID: "ab16cd8a-d018-4192-be1c-27c1f22f359c"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.765612 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-2rtf2"]
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.765904 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" podUID="4619f34b-3016-4243-a2f0-c916b5514512" containerName="dnsmasq-dns" containerID="cri-o://aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19" gracePeriod=10
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.769674 4869 scope.go:117] "RemoveContainer" containerID="00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.794366 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.798093 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.801812 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ab16cd8a-d018-4192-be1c-27c1f22f359c-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.801836 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a01a674e-c971-4c67-8418-5ebd661f84dc-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.801847 4869 reconciler_common.go:293] "Volume detached for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.801858 4869 reconciler_common.go:293] "Volume detached for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") on node \"crc\" DevicePath \"\""
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.812959 4869 scope.go:117] "RemoveContainer" containerID="714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02"
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.818062 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02\": container with ID starting with 714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02 not found: ID does not exist" containerID="714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.818106 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02"} err="failed to get container status \"714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02\": rpc error: code = NotFound desc = could not find container \"714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02\": container with ID starting with 714fa9199ad28cbbd24f545071833de02fa9f86b14a4af50dc35e3d30fd3ea02 not found: ID does not exist"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.818149 4869 scope.go:117] "RemoveContainer" containerID="00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7"
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.818591 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7\": container with ID starting with 00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7 not found: ID does not exist" containerID="00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.818616 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7"} err="failed to get container status \"00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7\": rpc error: code = NotFound desc = could not find container \"00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7\": container with ID starting with 00bdf8238b21905e53b616516ef820ef34410e0f8599b86d258ad0867f5eb3d7 not found: ID does not exist"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.818633 4869 scope.go:117] "RemoveContainer" containerID="31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.843864 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.844412 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerName="setup-container"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.844429 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerName="setup-container"
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.844463 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerName="rabbitmq"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.844469 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerName="rabbitmq"
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.844485 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerName="rabbitmq"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.844493 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerName="rabbitmq"
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.844515 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerName="setup-container"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.844521 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerName="setup-container"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.846666 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" containerName="rabbitmq"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.846691 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" containerName="rabbitmq"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.849947 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.853214 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.853332 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.853459 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.857608 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.858878 4869 scope.go:117] "RemoveContainer" containerID="1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.859396 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6kgfd"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.862612 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.893264 4869 scope.go:117] "RemoveContainer" containerID="31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52"
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.893774 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52\": container with ID starting with 31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52 not found: ID does not exist" containerID="31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.893810 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52"} err="failed to get container status \"31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52\": rpc error: code = NotFound desc = could not find container \"31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52\": container with ID starting with 31a7edab5cc21fbfb319fbb80dd7a427388fd5ade9116f26f9fea589bab4af52 not found: ID does not exist"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.893835 4869 scope.go:117] "RemoveContainer" containerID="1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05"
Jan 30 12:11:26 crc kubenswrapper[4869]: E0130 12:11:26.894220 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05\": container with ID starting with 1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05 not found: ID does not exist" containerID="1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05"
Jan 30 12:11:26 crc kubenswrapper[4869]: I0130 12:11:26.894245 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05"} err="failed to get container status \"1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05\": rpc error: code = NotFound desc = could not find container \"1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05\": container with ID starting with 1e8633f6e9a6eaf5d95c64a62440869aa925b49e7b2cd87a4677c56359f6fc05 not found: ID does not exist"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.004777 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0f975286-9c86-4dd2-a1df-170254db9def-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.004835 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0f975286-9c86-4dd2-a1df-170254db9def-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.004887 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.004956 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0f975286-9c86-4dd2-a1df-170254db9def-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.004979 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.005014 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.005037 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0f975286-9c86-4dd2-a1df-170254db9def-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.005087 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6p92\" (UniqueName: \"kubernetes.io/projected/0f975286-9c86-4dd2-a1df-170254db9def-kube-api-access-s6p92\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.005116 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.080657 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.093782 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.112677 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.112990 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0f975286-9c86-4dd2-a1df-170254db9def-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113095 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6p92\" (UniqueName: \"kubernetes.io/projected/0f975286-9c86-4dd2-a1df-170254db9def-kube-api-access-s6p92\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0"
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113168 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113251 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0f975286-9c86-4dd2-a1df-170254db9def-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0f975286-9c86-4dd2-a1df-170254db9def-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113411 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113511 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0f975286-9c86-4dd2-a1df-170254db9def-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113588 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.123785 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.125135 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.127331 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0f975286-9c86-4dd2-a1df-170254db9def-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.113523 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.128774 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0f975286-9c86-4dd2-a1df-170254db9def-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.128997 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.129872 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.154266 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.154603 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.154732 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.154842 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-hjdtv" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.167908 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.170669 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.170737 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/21ec76f3ec42e2ecd6f85a7b4821af1c0eb541c56f5b6cd974b29ba9b4d436b8/globalmount\"" pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.317825 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.317906 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c74a886-5483-43ae-a293-00cf837302c6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.318023 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.318061 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.318088 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8c74a886-5483-43ae-a293-00cf837302c6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.318130 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c74a886-5483-43ae-a293-00cf837302c6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.318162 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv25z\" (UniqueName: \"kubernetes.io/projected/8c74a886-5483-43ae-a293-00cf837302c6-kube-api-access-sv25z\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.318197 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.318248 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c74a886-5483-43ae-a293-00cf837302c6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.359739 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0f975286-9c86-4dd2-a1df-170254db9def-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.360555 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6p92\" (UniqueName: \"kubernetes.io/projected/0f975286-9c86-4dd2-a1df-170254db9def-kube-api-access-s6p92\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.360566 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0f975286-9c86-4dd2-a1df-170254db9def-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.367163 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0f975286-9c86-4dd2-a1df-170254db9def-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.386349 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e055ccd-c768-4ae2-b309-495a63280ec9\") pod \"rabbitmq-server-0\" (UID: \"0f975286-9c86-4dd2-a1df-170254db9def\") " pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419591 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8c74a886-5483-43ae-a293-00cf837302c6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419651 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c74a886-5483-43ae-a293-00cf837302c6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419679 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv25z\" (UniqueName: 
\"kubernetes.io/projected/8c74a886-5483-43ae-a293-00cf837302c6-kube-api-access-sv25z\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419726 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419774 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c74a886-5483-43ae-a293-00cf837302c6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419811 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419843 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c74a886-5483-43ae-a293-00cf837302c6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419908 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.419933 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.420912 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.421216 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c74a886-5483-43ae-a293-00cf837302c6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.423687 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c74a886-5483-43ae-a293-00cf837302c6-plugins-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.424261 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.424414 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c74a886-5483-43ae-a293-00cf837302c6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.426203 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.426249 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/572454f230dc02e3d14e57d1b1caae9d3d3194b06bb04c0d46fe1bb6ecd2a96b/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.427261 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8c74a886-5483-43ae-a293-00cf837302c6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.427326 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c74a886-5483-43ae-a293-00cf837302c6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.441573 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv25z\" (UniqueName: \"kubernetes.io/projected/8c74a886-5483-43ae-a293-00cf837302c6-kube-api-access-sv25z\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.471694 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.473235 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e0963118-064b-4753-9d52-6c6862da45a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0963118-064b-4753-9d52-6c6862da45a0\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c74a886-5483-43ae-a293-00cf837302c6\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.541733 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.554509 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.731787 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdz78\" (UniqueName: \"kubernetes.io/projected/4619f34b-3016-4243-a2f0-c916b5514512-kube-api-access-qdz78\") pod \"4619f34b-3016-4243-a2f0-c916b5514512\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.732120 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-config\") pod \"4619f34b-3016-4243-a2f0-c916b5514512\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.732187 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc\") pod \"4619f34b-3016-4243-a2f0-c916b5514512\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.746347 4869 generic.go:334] "Generic (PLEG): container finished" podID="4619f34b-3016-4243-a2f0-c916b5514512" containerID="aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19" exitCode=0 Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.746383 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.746425 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" event={"ID":"4619f34b-3016-4243-a2f0-c916b5514512","Type":"ContainerDied","Data":"aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19"} Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.746483 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-2rtf2" event={"ID":"4619f34b-3016-4243-a2f0-c916b5514512","Type":"ContainerDied","Data":"adea9865b14f8651d2a22fef13cc0dc6348bdc4e906d03cb714897c20b9043d2"} Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.746507 4869 scope.go:117] "RemoveContainer" containerID="aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.758294 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4619f34b-3016-4243-a2f0-c916b5514512-kube-api-access-qdz78" (OuterVolumeSpecName: "kube-api-access-qdz78") pod "4619f34b-3016-4243-a2f0-c916b5514512" (UID: "4619f34b-3016-4243-a2f0-c916b5514512"). InnerVolumeSpecName "kube-api-access-qdz78". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.767945 4869 scope.go:117] "RemoveContainer" containerID="271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa" Jan 30 12:11:27 crc kubenswrapper[4869]: E0130 12:11:27.780813 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc podName:4619f34b-3016-4243-a2f0-c916b5514512 nodeName:}" failed. 
No retries permitted until 2026-01-30 12:11:28.280653091 +0000 UTC m=+4638.830529157 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc") pod "4619f34b-3016-4243-a2f0-c916b5514512" (UID: "4619f34b-3016-4243-a2f0-c916b5514512") : error deleting /var/lib/kubelet/pods/4619f34b-3016-4243-a2f0-c916b5514512/volume-subpaths: remove /var/lib/kubelet/pods/4619f34b-3016-4243-a2f0-c916b5514512/volume-subpaths: no such file or directory Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.781015 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-config" (OuterVolumeSpecName: "config") pod "4619f34b-3016-4243-a2f0-c916b5514512" (UID: "4619f34b-3016-4243-a2f0-c916b5514512"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.788120 4869 scope.go:117] "RemoveContainer" containerID="aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19" Jan 30 12:11:27 crc kubenswrapper[4869]: E0130 12:11:27.788650 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19\": container with ID starting with aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19 not found: ID does not exist" containerID="aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.788701 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19"} err="failed to get container status \"aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19\": rpc error: code = NotFound desc = could not find container \"aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19\": container with ID starting with aad8d63e7267555a41647b92283b1bbb68a56230d5ef47d244c9f5b4b468dc19 not found: ID does not exist" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.788794 4869 scope.go:117] "RemoveContainer" containerID="271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa" Jan 30 12:11:27 crc kubenswrapper[4869]: E0130 12:11:27.790036 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa\": container with ID starting with 271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa not found: ID does not exist" containerID="271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.790062 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa"} err="failed to get container status \"271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa\": rpc error: code = NotFound desc = could not find container \"271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa\": container with ID starting with 271886521af0936be29a1861c2c707a2594d720bf9852febe473f705ef739daa not found: ID does not exist" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.833997 4869 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-qdz78\" (UniqueName: \"kubernetes.io/projected/4619f34b-3016-4243-a2f0-c916b5514512-kube-api-access-qdz78\") on node \"crc\" DevicePath \"\"" Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.834031 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-config\") on node \"crc\" DevicePath \"\"" Jan 30 12:11:27 crc kubenswrapper[4869]: W0130 12:11:27.939906 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f975286_9c86_4dd2_a1df_170254db9def.slice/crio-5f869b1f870cf0a3bf5ed4c8c15c97705bad96f7f065210d4d2ef23404a3353d WatchSource:0}: Error finding container 5f869b1f870cf0a3bf5ed4c8c15c97705bad96f7f065210d4d2ef23404a3353d: Status 404 returned error can't find the container with id 5f869b1f870cf0a3bf5ed4c8c15c97705bad96f7f065210d4d2ef23404a3353d Jan 30 12:11:27 crc kubenswrapper[4869]: I0130 12:11:27.947202 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.050312 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 12:11:28 crc kubenswrapper[4869]: W0130 12:11:28.056452 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c74a886_5483_43ae_a293_00cf837302c6.slice/crio-06a08f0d50ec837cdf575904e59409742711986433422a09bf20a273b8c72d78 WatchSource:0}: Error finding container 06a08f0d50ec837cdf575904e59409742711986433422a09bf20a273b8c72d78: Status 404 returned error can't find the container with id 06a08f0d50ec837cdf575904e59409742711986433422a09bf20a273b8c72d78 Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.142588 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a01a674e-c971-4c67-8418-5ebd661f84dc" path="/var/lib/kubelet/pods/a01a674e-c971-4c67-8418-5ebd661f84dc/volumes" Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.143728 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab16cd8a-d018-4192-be1c-27c1f22f359c" path="/var/lib/kubelet/pods/ab16cd8a-d018-4192-be1c-27c1f22f359c/volumes" Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.344636 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc\") pod \"4619f34b-3016-4243-a2f0-c916b5514512\" (UID: \"4619f34b-3016-4243-a2f0-c916b5514512\") " Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.345241 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4619f34b-3016-4243-a2f0-c916b5514512" (UID: "4619f34b-3016-4243-a2f0-c916b5514512"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.345551 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4619f34b-3016-4243-a2f0-c916b5514512-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.376507 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-2rtf2"] Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.383633 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-2rtf2"] Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.759014 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c74a886-5483-43ae-a293-00cf837302c6","Type":"ContainerStarted","Data":"06a08f0d50ec837cdf575904e59409742711986433422a09bf20a273b8c72d78"} Jan 30 12:11:28 crc kubenswrapper[4869]: I0130 12:11:28.761145 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0f975286-9c86-4dd2-a1df-170254db9def","Type":"ContainerStarted","Data":"5f869b1f870cf0a3bf5ed4c8c15c97705bad96f7f065210d4d2ef23404a3353d"} Jan 30 12:11:29 crc kubenswrapper[4869]: I0130 12:11:29.133314 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:11:29 crc kubenswrapper[4869]: E0130 12:11:29.133645 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:11:29 crc kubenswrapper[4869]: I0130 12:11:29.769687 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0f975286-9c86-4dd2-a1df-170254db9def","Type":"ContainerStarted","Data":"2b146612916466502d9f1f9e0f7b038d7bda1a8551523c585c2b6700df622e6b"} Jan 30 12:11:29 crc kubenswrapper[4869]: I0130 12:11:29.772415 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c74a886-5483-43ae-a293-00cf837302c6","Type":"ContainerStarted","Data":"21fbb85d4ab2863b49cd345515c3bb5387fc52d1f171b1604a9f6eb76638002c"} Jan 30 12:11:30 crc kubenswrapper[4869]: I0130 12:11:30.140811 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4619f34b-3016-4243-a2f0-c916b5514512" path="/var/lib/kubelet/pods/4619f34b-3016-4243-a2f0-c916b5514512/volumes" Jan 30 12:11:42 crc kubenswrapper[4869]: I0130 12:11:42.133019 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:11:42 crc kubenswrapper[4869]: E0130 12:11:42.133672 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:11:57 crc kubenswrapper[4869]: I0130 12:11:57.133478 4869 scope.go:117] 
"RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:11:57 crc kubenswrapper[4869]: E0130 12:11:57.134245 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:12:02 crc kubenswrapper[4869]: I0130 12:12:02.003036 4869 generic.go:334] "Generic (PLEG): container finished" podID="0f975286-9c86-4dd2-a1df-170254db9def" containerID="2b146612916466502d9f1f9e0f7b038d7bda1a8551523c585c2b6700df622e6b" exitCode=0 Jan 30 12:12:02 crc kubenswrapper[4869]: I0130 12:12:02.003140 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0f975286-9c86-4dd2-a1df-170254db9def","Type":"ContainerDied","Data":"2b146612916466502d9f1f9e0f7b038d7bda1a8551523c585c2b6700df622e6b"} Jan 30 12:12:02 crc kubenswrapper[4869]: I0130 12:12:02.005145 4869 generic.go:334] "Generic (PLEG): container finished" podID="8c74a886-5483-43ae-a293-00cf837302c6" containerID="21fbb85d4ab2863b49cd345515c3bb5387fc52d1f171b1604a9f6eb76638002c" exitCode=0 Jan 30 12:12:02 crc kubenswrapper[4869]: I0130 12:12:02.005202 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c74a886-5483-43ae-a293-00cf837302c6","Type":"ContainerDied","Data":"21fbb85d4ab2863b49cd345515c3bb5387fc52d1f171b1604a9f6eb76638002c"} Jan 30 12:12:03 crc kubenswrapper[4869]: I0130 12:12:03.014399 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0f975286-9c86-4dd2-a1df-170254db9def","Type":"ContainerStarted","Data":"9a6c5c6fb34e147508c4f17de9b243a37205f7de0fdce382b604885542237f99"} Jan 30 12:12:03 crc kubenswrapper[4869]: I0130 12:12:03.014934 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 30 12:12:03 crc kubenswrapper[4869]: I0130 12:12:03.016183 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c74a886-5483-43ae-a293-00cf837302c6","Type":"ContainerStarted","Data":"b1015ab2424dca9cbfc07df7abe567122ba7b8e53ee1c25ccfb9cfd43421c193"} Jan 30 12:12:03 crc kubenswrapper[4869]: I0130 12:12:03.016440 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:12:03 crc kubenswrapper[4869]: I0130 12:12:03.037877 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.037855078 podStartE2EDuration="37.037855078s" podCreationTimestamp="2026-01-30 12:11:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:12:03.034327888 +0000 UTC m=+4673.584203974" watchObservedRunningTime="2026-01-30 12:12:03.037855078 +0000 UTC m=+4673.587731144" Jan 30 12:12:03 crc kubenswrapper[4869]: I0130 12:12:03.060132 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.06011134 podStartE2EDuration="36.06011134s" podCreationTimestamp="2026-01-30 12:11:27 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:12:03.054628945 +0000 UTC m=+4673.604505031" watchObservedRunningTime="2026-01-30 12:12:03.06011134 +0000 UTC m=+4673.609987406" Jan 30 12:12:10 crc kubenswrapper[4869]: I0130 12:12:10.137132 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:12:10 crc kubenswrapper[4869]: E0130 12:12:10.137884 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:12:17 crc kubenswrapper[4869]: I0130 12:12:17.474948 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 30 12:12:17 crc kubenswrapper[4869]: I0130 12:12:17.546622 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 30 12:12:25 crc kubenswrapper[4869]: I0130 12:12:25.133418 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:12:25 crc kubenswrapper[4869]: E0130 12:12:25.134108 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.601444 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 30 12:12:28 crc kubenswrapper[4869]: E0130 12:12:28.602598 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4619f34b-3016-4243-a2f0-c916b5514512" containerName="dnsmasq-dns" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.602639 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4619f34b-3016-4243-a2f0-c916b5514512" containerName="dnsmasq-dns" Jan 30 12:12:28 crc kubenswrapper[4869]: E0130 12:12:28.602681 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4619f34b-3016-4243-a2f0-c916b5514512" containerName="init" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.602689 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4619f34b-3016-4243-a2f0-c916b5514512" containerName="init" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.603724 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4619f34b-3016-4243-a2f0-c916b5514512" containerName="dnsmasq-dns" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.604695 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.609944 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mpj6x" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.633641 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.700520 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrptx\" (UniqueName: \"kubernetes.io/projected/27a51261-d093-4a12-b828-eb8cc1f8c598-kube-api-access-hrptx\") pod \"mariadb-client\" (UID: \"27a51261-d093-4a12-b828-eb8cc1f8c598\") " pod="openstack/mariadb-client" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.801655 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrptx\" (UniqueName: \"kubernetes.io/projected/27a51261-d093-4a12-b828-eb8cc1f8c598-kube-api-access-hrptx\") pod \"mariadb-client\" (UID: \"27a51261-d093-4a12-b828-eb8cc1f8c598\") " pod="openstack/mariadb-client" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.820153 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrptx\" (UniqueName: \"kubernetes.io/projected/27a51261-d093-4a12-b828-eb8cc1f8c598-kube-api-access-hrptx\") pod \"mariadb-client\" (UID: \"27a51261-d093-4a12-b828-eb8cc1f8c598\") " pod="openstack/mariadb-client" Jan 30 12:12:28 crc kubenswrapper[4869]: I0130 12:12:28.927015 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:12:29 crc kubenswrapper[4869]: I0130 12:12:29.400790 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:12:30 crc kubenswrapper[4869]: I0130 12:12:30.212648 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"27a51261-d093-4a12-b828-eb8cc1f8c598","Type":"ContainerStarted","Data":"89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c"} Jan 30 12:12:30 crc kubenswrapper[4869]: I0130 12:12:30.213103 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"27a51261-d093-4a12-b828-eb8cc1f8c598","Type":"ContainerStarted","Data":"6964f88e657c5aef61c741308556e6d635edfba058a8cf15bcf2a1ab928bf1a3"} Jan 30 12:12:30 crc kubenswrapper[4869]: I0130 12:12:30.229583 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client" podStartSLOduration=2.229563614 podStartE2EDuration="2.229563614s" podCreationTimestamp="2026-01-30 12:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:12:30.225502879 +0000 UTC m=+4700.775378945" watchObservedRunningTime="2026-01-30 12:12:30.229563614 +0000 UTC m=+4700.779439680" Jan 30 12:12:39 crc kubenswrapper[4869]: I0130 12:12:39.133401 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:12:39 crc kubenswrapper[4869]: E0130 12:12:39.134142 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.644347 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mw7kt"] Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.646191 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.660461 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mw7kt"] Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.826847 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gczwq\" (UniqueName: \"kubernetes.io/projected/95775798-62a2-4667-b473-57e02d68ddc0-kube-api-access-gczwq\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.826932 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-catalog-content\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.827172 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-utilities\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.928689 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gczwq\" (UniqueName: \"kubernetes.io/projected/95775798-62a2-4667-b473-57e02d68ddc0-kube-api-access-gczwq\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.928794 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-catalog-content\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.928866 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-utilities\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.929565 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-catalog-content\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.929602 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-utilities\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.950986 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gczwq\" (UniqueName: \"kubernetes.io/projected/95775798-62a2-4667-b473-57e02d68ddc0-kube-api-access-gczwq\") pod \"certified-operators-mw7kt\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:41 crc kubenswrapper[4869]: I0130 12:12:41.967127 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:42 crc kubenswrapper[4869]: I0130 12:12:42.357508 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mw7kt"] Jan 30 12:12:43 crc kubenswrapper[4869]: I0130 12:12:43.307473 4869 generic.go:334] "Generic (PLEG): container finished" podID="95775798-62a2-4667-b473-57e02d68ddc0" containerID="2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9" exitCode=0 Jan 30 12:12:43 crc kubenswrapper[4869]: I0130 12:12:43.307580 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mw7kt" event={"ID":"95775798-62a2-4667-b473-57e02d68ddc0","Type":"ContainerDied","Data":"2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9"} Jan 30 12:12:43 crc kubenswrapper[4869]: I0130 12:12:43.307761 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mw7kt" event={"ID":"95775798-62a2-4667-b473-57e02d68ddc0","Type":"ContainerStarted","Data":"2fb7da55d6efe71e18e173ac9467ca96066fb47153bd1f7108b04c43c88fbd21"} Jan 30 12:12:43 crc kubenswrapper[4869]: I0130 12:12:43.376946 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:12:43 crc kubenswrapper[4869]: I0130 12:12:43.377232 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-client" podUID="27a51261-d093-4a12-b828-eb8cc1f8c598" containerName="mariadb-client" containerID="cri-o://89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c" gracePeriod=30 Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.051599 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.161360 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrptx\" (UniqueName: \"kubernetes.io/projected/27a51261-d093-4a12-b828-eb8cc1f8c598-kube-api-access-hrptx\") pod \"27a51261-d093-4a12-b828-eb8cc1f8c598\" (UID: \"27a51261-d093-4a12-b828-eb8cc1f8c598\") " Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.166984 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27a51261-d093-4a12-b828-eb8cc1f8c598-kube-api-access-hrptx" (OuterVolumeSpecName: "kube-api-access-hrptx") pod "27a51261-d093-4a12-b828-eb8cc1f8c598" (UID: "27a51261-d093-4a12-b828-eb8cc1f8c598"). InnerVolumeSpecName "kube-api-access-hrptx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.263318 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrptx\" (UniqueName: \"kubernetes.io/projected/27a51261-d093-4a12-b828-eb8cc1f8c598-kube-api-access-hrptx\") on node \"crc\" DevicePath \"\"" Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.315740 4869 generic.go:334] "Generic (PLEG): container finished" podID="27a51261-d093-4a12-b828-eb8cc1f8c598" containerID="89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c" exitCode=143 Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.315802 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"27a51261-d093-4a12-b828-eb8cc1f8c598","Type":"ContainerDied","Data":"89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c"} Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.315808 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.315866 4869 scope.go:117] "RemoveContainer" containerID="89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c" Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.315850 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"27a51261-d093-4a12-b828-eb8cc1f8c598","Type":"ContainerDied","Data":"6964f88e657c5aef61c741308556e6d635edfba058a8cf15bcf2a1ab928bf1a3"} Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.349352 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.355245 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.549231 4869 scope.go:117] "RemoveContainer" containerID="89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c" Jan 30 12:12:44 crc kubenswrapper[4869]: E0130 12:12:44.549684 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c\": container with ID starting with 89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c not found: ID does not exist" containerID="89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c" Jan 30 12:12:44 crc kubenswrapper[4869]: I0130 12:12:44.549750 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c"} err="failed to get container status \"89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c\": rpc error: code = NotFound desc = could not find container \"89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c\": container with ID starting with 89014abb1506ea2be8a98fe3306aecfb2a625c4a333140a534b987ab9685c16c not found: ID does not exist" Jan 30 12:12:46 crc kubenswrapper[4869]: I0130 12:12:46.143099 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27a51261-d093-4a12-b828-eb8cc1f8c598" path="/var/lib/kubelet/pods/27a51261-d093-4a12-b828-eb8cc1f8c598/volumes" Jan 30 12:12:46 crc kubenswrapper[4869]: I0130 12:12:46.332063 4869 generic.go:334] "Generic (PLEG): container finished" podID="95775798-62a2-4667-b473-57e02d68ddc0" 
containerID="724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500" exitCode=0 Jan 30 12:12:46 crc kubenswrapper[4869]: I0130 12:12:46.332116 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mw7kt" event={"ID":"95775798-62a2-4667-b473-57e02d68ddc0","Type":"ContainerDied","Data":"724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500"} Jan 30 12:12:48 crc kubenswrapper[4869]: I0130 12:12:48.346731 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mw7kt" event={"ID":"95775798-62a2-4667-b473-57e02d68ddc0","Type":"ContainerStarted","Data":"241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8"} Jan 30 12:12:48 crc kubenswrapper[4869]: I0130 12:12:48.367169 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mw7kt" podStartSLOduration=3.307635239 podStartE2EDuration="7.367147801s" podCreationTimestamp="2026-01-30 12:12:41 +0000 UTC" firstStartedPulling="2026-01-30 12:12:43.311113337 +0000 UTC m=+4713.860989403" lastFinishedPulling="2026-01-30 12:12:47.370625899 +0000 UTC m=+4717.920501965" observedRunningTime="2026-01-30 12:12:48.363558739 +0000 UTC m=+4718.913434825" watchObservedRunningTime="2026-01-30 12:12:48.367147801 +0000 UTC m=+4718.917023867" Jan 30 12:12:51 crc kubenswrapper[4869]: I0130 12:12:51.968104 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:51 crc kubenswrapper[4869]: I0130 12:12:51.968418 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:52 crc kubenswrapper[4869]: I0130 12:12:52.013820 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:52 crc kubenswrapper[4869]: I0130 12:12:52.426977 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:53 crc kubenswrapper[4869]: I0130 12:12:53.629562 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mw7kt"] Jan 30 12:12:54 crc kubenswrapper[4869]: I0130 12:12:54.133113 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:12:54 crc kubenswrapper[4869]: E0130 12:12:54.133431 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:12:54 crc kubenswrapper[4869]: I0130 12:12:54.392815 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mw7kt" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="registry-server" containerID="cri-o://241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8" gracePeriod=2 Jan 30 12:12:54 crc kubenswrapper[4869]: I0130 12:12:54.849330 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.049086 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-catalog-content\") pod \"95775798-62a2-4667-b473-57e02d68ddc0\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.049175 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-utilities\") pod \"95775798-62a2-4667-b473-57e02d68ddc0\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.049226 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gczwq\" (UniqueName: \"kubernetes.io/projected/95775798-62a2-4667-b473-57e02d68ddc0-kube-api-access-gczwq\") pod \"95775798-62a2-4667-b473-57e02d68ddc0\" (UID: \"95775798-62a2-4667-b473-57e02d68ddc0\") " Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.051253 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-utilities" (OuterVolumeSpecName: "utilities") pod "95775798-62a2-4667-b473-57e02d68ddc0" (UID: "95775798-62a2-4667-b473-57e02d68ddc0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.055026 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95775798-62a2-4667-b473-57e02d68ddc0-kube-api-access-gczwq" (OuterVolumeSpecName: "kube-api-access-gczwq") pod "95775798-62a2-4667-b473-57e02d68ddc0" (UID: "95775798-62a2-4667-b473-57e02d68ddc0"). InnerVolumeSpecName "kube-api-access-gczwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.107459 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95775798-62a2-4667-b473-57e02d68ddc0" (UID: "95775798-62a2-4667-b473-57e02d68ddc0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.151258 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.151307 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95775798-62a2-4667-b473-57e02d68ddc0-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.151321 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gczwq\" (UniqueName: \"kubernetes.io/projected/95775798-62a2-4667-b473-57e02d68ddc0-kube-api-access-gczwq\") on node \"crc\" DevicePath \"\"" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.401954 4869 generic.go:334] "Generic (PLEG): container finished" podID="95775798-62a2-4667-b473-57e02d68ddc0" containerID="241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8" exitCode=0 Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.402008 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mw7kt" event={"ID":"95775798-62a2-4667-b473-57e02d68ddc0","Type":"ContainerDied","Data":"241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8"} Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.402039 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mw7kt" event={"ID":"95775798-62a2-4667-b473-57e02d68ddc0","Type":"ContainerDied","Data":"2fb7da55d6efe71e18e173ac9467ca96066fb47153bd1f7108b04c43c88fbd21"} Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.402058 4869 scope.go:117] "RemoveContainer" containerID="241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.402063 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mw7kt" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.428301 4869 scope.go:117] "RemoveContainer" containerID="724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.434512 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mw7kt"] Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.440033 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mw7kt"] Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.449080 4869 scope.go:117] "RemoveContainer" containerID="2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.483204 4869 scope.go:117] "RemoveContainer" containerID="241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8" Jan 30 12:12:55 crc kubenswrapper[4869]: E0130 12:12:55.483647 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8\": container with ID starting with 241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8 not found: ID does not exist" containerID="241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.483680 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8"} err="failed to get container status \"241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8\": rpc error: code = NotFound desc = could not find container \"241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8\": container with ID starting with 241e1c084daa195e1006adcc984044b236af1a617f2090fd29697e3e1bc74ea8 not found: ID does not exist" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.483721 4869 scope.go:117] "RemoveContainer" containerID="724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500" Jan 30 12:12:55 crc kubenswrapper[4869]: E0130 12:12:55.484148 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500\": container with ID starting with 724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500 not found: ID does not exist" containerID="724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.484268 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500"} err="failed to get container status \"724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500\": rpc error: code = NotFound desc = could not find container \"724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500\": container with ID starting with 724ad71828d00a501980da9e360acb6b051e449b958484cfbb4cc00f5177c500 not found: ID does not exist" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.484359 4869 scope.go:117] "RemoveContainer" containerID="2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9" Jan 30 12:12:55 crc kubenswrapper[4869]: E0130 12:12:55.485105 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9\": container with ID starting with 2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9 not found: ID does not exist" containerID="2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9" Jan 30 12:12:55 crc kubenswrapper[4869]: I0130 12:12:55.485147 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9"} err="failed to get container status \"2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9\": rpc error: code = NotFound desc = could not find container \"2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9\": container with ID starting with 2b8b8003cea62451c509dac1a158aac12f82c97a197e3794b7d6aefafc785cd9 not found: ID does not exist" Jan 30 12:12:56 crc kubenswrapper[4869]: I0130 12:12:56.145948 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95775798-62a2-4667-b473-57e02d68ddc0" path="/var/lib/kubelet/pods/95775798-62a2-4667-b473-57e02d68ddc0/volumes" Jan 30 12:13:07 crc kubenswrapper[4869]: I0130 12:13:07.133664 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:13:07 crc kubenswrapper[4869]: E0130 12:13:07.134435 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:13:15 crc kubenswrapper[4869]: I0130 12:13:15.787035 4869 scope.go:117] "RemoveContainer" containerID="ec10f3d2f632f1bae5577994d31ce20d3ee74c3e304b83a2dda2a86f0c712516" Jan 30 12:13:19 crc kubenswrapper[4869]: I0130 12:13:19.133786 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:13:19 crc kubenswrapper[4869]: E0130 12:13:19.134595 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:13:34 crc kubenswrapper[4869]: I0130 12:13:34.133519 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:13:34 crc kubenswrapper[4869]: E0130 12:13:34.134242 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:13:49 crc kubenswrapper[4869]: I0130 12:13:49.133592 4869 scope.go:117] "RemoveContainer" 
containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:13:49 crc kubenswrapper[4869]: E0130 12:13:49.134287 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:14:02 crc kubenswrapper[4869]: I0130 12:14:02.133209 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:14:02 crc kubenswrapper[4869]: E0130 12:14:02.133974 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:14:14 crc kubenswrapper[4869]: I0130 12:14:14.135060 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:14:14 crc kubenswrapper[4869]: E0130 12:14:14.135969 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:14:29 crc kubenswrapper[4869]: I0130 12:14:29.133206 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:14:29 crc kubenswrapper[4869]: E0130 12:14:29.134091 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:14:40 crc kubenswrapper[4869]: I0130 12:14:40.138788 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:14:40 crc kubenswrapper[4869]: E0130 12:14:40.139629 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:14:54 crc kubenswrapper[4869]: I0130 12:14:54.133447 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:14:54 crc kubenswrapper[4869]: E0130 12:14:54.134387 4869 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.152368 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn"] Jan 30 12:15:00 crc kubenswrapper[4869]: E0130 12:15:00.154596 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="extract-utilities" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.154628 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="extract-utilities" Jan 30 12:15:00 crc kubenswrapper[4869]: E0130 12:15:00.154660 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="registry-server" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.154673 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="registry-server" Jan 30 12:15:00 crc kubenswrapper[4869]: E0130 12:15:00.154685 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27a51261-d093-4a12-b828-eb8cc1f8c598" containerName="mariadb-client" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.154693 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="27a51261-d093-4a12-b828-eb8cc1f8c598" containerName="mariadb-client" Jan 30 12:15:00 crc kubenswrapper[4869]: E0130 12:15:00.154705 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="extract-content" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.154733 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="extract-content" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.154986 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="95775798-62a2-4667-b473-57e02d68ddc0" containerName="registry-server" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.155011 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="27a51261-d093-4a12-b828-eb8cc1f8c598" containerName="mariadb-client" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.155606 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.158766 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn"] Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.159308 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.159559 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.273436 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a62959-e47a-4348-9f46-90ade5be3344-secret-volume\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.273504 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vzsd\" (UniqueName: \"kubernetes.io/projected/93a62959-e47a-4348-9f46-90ade5be3344-kube-api-access-4vzsd\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.274173 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a62959-e47a-4348-9f46-90ade5be3344-config-volume\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.375777 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a62959-e47a-4348-9f46-90ade5be3344-secret-volume\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.375862 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vzsd\" (UniqueName: \"kubernetes.io/projected/93a62959-e47a-4348-9f46-90ade5be3344-kube-api-access-4vzsd\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.375946 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a62959-e47a-4348-9f46-90ade5be3344-config-volume\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.377161 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a62959-e47a-4348-9f46-90ade5be3344-config-volume\") pod 
\"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.382396 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a62959-e47a-4348-9f46-90ade5be3344-secret-volume\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.395811 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vzsd\" (UniqueName: \"kubernetes.io/projected/93a62959-e47a-4348-9f46-90ade5be3344-kube-api-access-4vzsd\") pod \"collect-profiles-29496255-wrwqn\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.480006 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:00 crc kubenswrapper[4869]: I0130 12:15:00.938058 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn"] Jan 30 12:15:01 crc kubenswrapper[4869]: I0130 12:15:01.302369 4869 generic.go:334] "Generic (PLEG): container finished" podID="93a62959-e47a-4348-9f46-90ade5be3344" containerID="d4bcedaa8a93e9c353aa5eb2505128a8949721f0b080a942e144d8b201457472" exitCode=0 Jan 30 12:15:01 crc kubenswrapper[4869]: I0130 12:15:01.302503 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" event={"ID":"93a62959-e47a-4348-9f46-90ade5be3344","Type":"ContainerDied","Data":"d4bcedaa8a93e9c353aa5eb2505128a8949721f0b080a942e144d8b201457472"} Jan 30 12:15:01 crc kubenswrapper[4869]: I0130 12:15:01.302772 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" event={"ID":"93a62959-e47a-4348-9f46-90ade5be3344","Type":"ContainerStarted","Data":"0943f579b279bede870d79ff5fb75e92b614b74f47d2fa4c7a6d10bde0495a34"} Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.577138 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.713542 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vzsd\" (UniqueName: \"kubernetes.io/projected/93a62959-e47a-4348-9f46-90ade5be3344-kube-api-access-4vzsd\") pod \"93a62959-e47a-4348-9f46-90ade5be3344\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.714005 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a62959-e47a-4348-9f46-90ade5be3344-secret-volume\") pod \"93a62959-e47a-4348-9f46-90ade5be3344\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.714061 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a62959-e47a-4348-9f46-90ade5be3344-config-volume\") pod \"93a62959-e47a-4348-9f46-90ade5be3344\" (UID: \"93a62959-e47a-4348-9f46-90ade5be3344\") " Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.714879 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93a62959-e47a-4348-9f46-90ade5be3344-config-volume" (OuterVolumeSpecName: "config-volume") pod "93a62959-e47a-4348-9f46-90ade5be3344" (UID: "93a62959-e47a-4348-9f46-90ade5be3344"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.718993 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93a62959-e47a-4348-9f46-90ade5be3344-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "93a62959-e47a-4348-9f46-90ade5be3344" (UID: "93a62959-e47a-4348-9f46-90ade5be3344"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.719024 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93a62959-e47a-4348-9f46-90ade5be3344-kube-api-access-4vzsd" (OuterVolumeSpecName: "kube-api-access-4vzsd") pod "93a62959-e47a-4348-9f46-90ade5be3344" (UID: "93a62959-e47a-4348-9f46-90ade5be3344"). InnerVolumeSpecName "kube-api-access-4vzsd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.816083 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a62959-e47a-4348-9f46-90ade5be3344-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.816205 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a62959-e47a-4348-9f46-90ade5be3344-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 12:15:02 crc kubenswrapper[4869]: I0130 12:15:02.816219 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vzsd\" (UniqueName: \"kubernetes.io/projected/93a62959-e47a-4348-9f46-90ade5be3344-kube-api-access-4vzsd\") on node \"crc\" DevicePath \"\"" Jan 30 12:15:03 crc kubenswrapper[4869]: I0130 12:15:03.317795 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" event={"ID":"93a62959-e47a-4348-9f46-90ade5be3344","Type":"ContainerDied","Data":"0943f579b279bede870d79ff5fb75e92b614b74f47d2fa4c7a6d10bde0495a34"} Jan 30 12:15:03 crc kubenswrapper[4869]: I0130 12:15:03.317844 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0943f579b279bede870d79ff5fb75e92b614b74f47d2fa4c7a6d10bde0495a34" Jan 30 12:15:03 crc kubenswrapper[4869]: I0130 12:15:03.317850 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496255-wrwqn" Jan 30 12:15:03 crc kubenswrapper[4869]: I0130 12:15:03.650053 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"] Jan 30 12:15:03 crc kubenswrapper[4869]: I0130 12:15:03.657542 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496210-cc4vr"] Jan 30 12:15:04 crc kubenswrapper[4869]: I0130 12:15:04.144007 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53231f02-8fd4-44e9-9181-d6e127dfdd42" path="/var/lib/kubelet/pods/53231f02-8fd4-44e9-9181-d6e127dfdd42/volumes" Jan 30 12:15:05 crc kubenswrapper[4869]: I0130 12:15:05.133645 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:15:05 crc kubenswrapper[4869]: E0130 12:15:05.133868 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:15:15 crc kubenswrapper[4869]: I0130 12:15:15.880037 4869 scope.go:117] "RemoveContainer" containerID="9ca7d0e53d0dd09c51b2eb8f226a19ee05cd94c34d0eaeded756d8e633b552fd" Jan 30 12:15:20 crc kubenswrapper[4869]: I0130 12:15:20.137500 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:15:20 crc kubenswrapper[4869]: E0130 12:15:20.139427 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:15:35 crc kubenswrapper[4869]: I0130 12:15:35.133111 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:15:35 crc kubenswrapper[4869]: I0130 12:15:35.681566 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"2753fcbf560309b5b7e37904d0d2cf7f1caef840cef8861e52eecbabf9d52a12"} Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.726762 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Jan 30 12:16:42 crc kubenswrapper[4869]: E0130 12:16:42.727843 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93a62959-e47a-4348-9f46-90ade5be3344" containerName="collect-profiles" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.727863 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="93a62959-e47a-4348-9f46-90ade5be3344" containerName="collect-profiles" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.728041 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="93a62959-e47a-4348-9f46-90ade5be3344" containerName="collect-profiles" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.728639 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.731809 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mpj6x" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.736158 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.865146 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\") pod \"mariadb-copy-data\" (UID: \"776504e9-6afb-4bd8-bab6-12aabf4b81f0\") " pod="openstack/mariadb-copy-data" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.865221 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xnjv\" (UniqueName: \"kubernetes.io/projected/776504e9-6afb-4bd8-bab6-12aabf4b81f0-kube-api-access-4xnjv\") pod \"mariadb-copy-data\" (UID: \"776504e9-6afb-4bd8-bab6-12aabf4b81f0\") " pod="openstack/mariadb-copy-data" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.966094 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\") pod \"mariadb-copy-data\" (UID: \"776504e9-6afb-4bd8-bab6-12aabf4b81f0\") " pod="openstack/mariadb-copy-data" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.966152 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xnjv\" (UniqueName: \"kubernetes.io/projected/776504e9-6afb-4bd8-bab6-12aabf4b81f0-kube-api-access-4xnjv\") pod \"mariadb-copy-data\" 
(UID: \"776504e9-6afb-4bd8-bab6-12aabf4b81f0\") " pod="openstack/mariadb-copy-data" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.969953 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.970006 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\") pod \"mariadb-copy-data\" (UID: \"776504e9-6afb-4bd8-bab6-12aabf4b81f0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8509b25790b53d98073e45d5db9688700f288834594d93d3a1c8cb72df05456f/globalmount\"" pod="openstack/mariadb-copy-data" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.987616 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xnjv\" (UniqueName: \"kubernetes.io/projected/776504e9-6afb-4bd8-bab6-12aabf4b81f0-kube-api-access-4xnjv\") pod \"mariadb-copy-data\" (UID: \"776504e9-6afb-4bd8-bab6-12aabf4b81f0\") " pod="openstack/mariadb-copy-data" Jan 30 12:16:42 crc kubenswrapper[4869]: I0130 12:16:42.999453 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd0e8ba9-950f-46dd-b680-eb83a73af1ee\") pod \"mariadb-copy-data\" (UID: \"776504e9-6afb-4bd8-bab6-12aabf4b81f0\") " pod="openstack/mariadb-copy-data" Jan 30 12:16:43 crc kubenswrapper[4869]: I0130 12:16:43.053532 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Jan 30 12:16:43 crc kubenswrapper[4869]: I0130 12:16:43.554057 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 30 12:16:44 crc kubenswrapper[4869]: I0130 12:16:44.190319 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"776504e9-6afb-4bd8-bab6-12aabf4b81f0","Type":"ContainerStarted","Data":"668cea969cf84c1fda48354031e9a646e02501d27d6eb6f5e06556837fd921ea"} Jan 30 12:16:44 crc kubenswrapper[4869]: I0130 12:16:44.190670 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"776504e9-6afb-4bd8-bab6-12aabf4b81f0","Type":"ContainerStarted","Data":"461978fdf6335bc9675da25f796f91d41cb1a83bd6d4d761d8771724957f45f1"} Jan 30 12:16:44 crc kubenswrapper[4869]: I0130 12:16:44.206876 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.206848934 podStartE2EDuration="3.206848934s" podCreationTimestamp="2026-01-30 12:16:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:16:44.205948998 +0000 UTC m=+4954.755825074" watchObservedRunningTime="2026-01-30 12:16:44.206848934 +0000 UTC m=+4954.756725000" Jan 30 12:16:46 crc kubenswrapper[4869]: I0130 12:16:46.994904 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 30 12:16:46 crc kubenswrapper[4869]: I0130 12:16:46.997909 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:16:47 crc kubenswrapper[4869]: I0130 12:16:47.005329 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:16:47 crc kubenswrapper[4869]: I0130 12:16:47.138828 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qwj5\" (UniqueName: \"kubernetes.io/projected/a1bb1cd7-c26f-4042-a655-022966162501-kube-api-access-7qwj5\") pod \"mariadb-client\" (UID: \"a1bb1cd7-c26f-4042-a655-022966162501\") " pod="openstack/mariadb-client" Jan 30 12:16:47 crc kubenswrapper[4869]: I0130 12:16:47.241441 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qwj5\" (UniqueName: \"kubernetes.io/projected/a1bb1cd7-c26f-4042-a655-022966162501-kube-api-access-7qwj5\") pod \"mariadb-client\" (UID: \"a1bb1cd7-c26f-4042-a655-022966162501\") " pod="openstack/mariadb-client" Jan 30 12:16:47 crc kubenswrapper[4869]: I0130 12:16:47.261721 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qwj5\" (UniqueName: \"kubernetes.io/projected/a1bb1cd7-c26f-4042-a655-022966162501-kube-api-access-7qwj5\") pod \"mariadb-client\" (UID: \"a1bb1cd7-c26f-4042-a655-022966162501\") " pod="openstack/mariadb-client" Jan 30 12:16:47 crc kubenswrapper[4869]: I0130 12:16:47.328584 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:16:47 crc kubenswrapper[4869]: I0130 12:16:47.795665 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:16:48 crc kubenswrapper[4869]: I0130 12:16:48.225961 4869 generic.go:334] "Generic (PLEG): container finished" podID="a1bb1cd7-c26f-4042-a655-022966162501" containerID="006c1c93d8852adf389282acb45af71ed6e5889249dc6644b13c3aba4bd4fdda" exitCode=0 Jan 30 12:16:48 crc kubenswrapper[4869]: I0130 12:16:48.226081 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"a1bb1cd7-c26f-4042-a655-022966162501","Type":"ContainerDied","Data":"006c1c93d8852adf389282acb45af71ed6e5889249dc6644b13c3aba4bd4fdda"} Jan 30 12:16:48 crc kubenswrapper[4869]: I0130 12:16:48.226317 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"a1bb1cd7-c26f-4042-a655-022966162501","Type":"ContainerStarted","Data":"5694a88b9ecde992b5a56d6596d90ee71b57a19cf712e70e43753a09df4a6ae8"} Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.517153 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.549884 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_a1bb1cd7-c26f-4042-a655-022966162501/mariadb-client/0.log" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.580592 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.586588 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.695755 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qwj5\" (UniqueName: \"kubernetes.io/projected/a1bb1cd7-c26f-4042-a655-022966162501-kube-api-access-7qwj5\") pod \"a1bb1cd7-c26f-4042-a655-022966162501\" (UID: \"a1bb1cd7-c26f-4042-a655-022966162501\") " Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.705961 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1bb1cd7-c26f-4042-a655-022966162501-kube-api-access-7qwj5" (OuterVolumeSpecName: "kube-api-access-7qwj5") pod "a1bb1cd7-c26f-4042-a655-022966162501" (UID: "a1bb1cd7-c26f-4042-a655-022966162501"). InnerVolumeSpecName "kube-api-access-7qwj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.747073 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 30 12:16:49 crc kubenswrapper[4869]: E0130 12:16:49.748017 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1bb1cd7-c26f-4042-a655-022966162501" containerName="mariadb-client" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.748112 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1bb1cd7-c26f-4042-a655-022966162501" containerName="mariadb-client" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.748329 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1bb1cd7-c26f-4042-a655-022966162501" containerName="mariadb-client" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.748942 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.749126 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.798171 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qwj5\" (UniqueName: \"kubernetes.io/projected/a1bb1cd7-c26f-4042-a655-022966162501-kube-api-access-7qwj5\") on node \"crc\" DevicePath \"\"" Jan 30 12:16:49 crc kubenswrapper[4869]: I0130 12:16:49.899615 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvgbq\" (UniqueName: \"kubernetes.io/projected/be0a2787-1b64-4aaf-a8c7-e16a2843e6a8-kube-api-access-pvgbq\") pod \"mariadb-client\" (UID: \"be0a2787-1b64-4aaf-a8c7-e16a2843e6a8\") " pod="openstack/mariadb-client" Jan 30 12:16:50 crc kubenswrapper[4869]: I0130 12:16:50.001904 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvgbq\" (UniqueName: \"kubernetes.io/projected/be0a2787-1b64-4aaf-a8c7-e16a2843e6a8-kube-api-access-pvgbq\") pod \"mariadb-client\" (UID: \"be0a2787-1b64-4aaf-a8c7-e16a2843e6a8\") " pod="openstack/mariadb-client" Jan 30 12:16:50 crc kubenswrapper[4869]: I0130 12:16:50.019769 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvgbq\" (UniqueName: \"kubernetes.io/projected/be0a2787-1b64-4aaf-a8c7-e16a2843e6a8-kube-api-access-pvgbq\") pod \"mariadb-client\" (UID: \"be0a2787-1b64-4aaf-a8c7-e16a2843e6a8\") " pod="openstack/mariadb-client" Jan 30 12:16:50 crc kubenswrapper[4869]: I0130 12:16:50.098317 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 30 12:16:50 crc kubenswrapper[4869]: I0130 12:16:50.159822 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1bb1cd7-c26f-4042-a655-022966162501" path="/var/lib/kubelet/pods/a1bb1cd7-c26f-4042-a655-022966162501/volumes" Jan 30 12:16:50 crc kubenswrapper[4869]: I0130 12:16:50.247265 4869 scope.go:117] "RemoveContainer" containerID="006c1c93d8852adf389282acb45af71ed6e5889249dc6644b13c3aba4bd4fdda" Jan 30 12:16:50 crc kubenswrapper[4869]: I0130 12:16:50.247421 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client"
Jan 30 12:16:50 crc kubenswrapper[4869]: W0130 12:16:50.318052 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe0a2787_1b64_4aaf_a8c7_e16a2843e6a8.slice/crio-627e018e33d5dffd37bc1f2c7562c142a767b2a7075eb110ce14240a440d74de WatchSource:0}: Error finding container 627e018e33d5dffd37bc1f2c7562c142a767b2a7075eb110ce14240a440d74de: Status 404 returned error can't find the container with id 627e018e33d5dffd37bc1f2c7562c142a767b2a7075eb110ce14240a440d74de
Jan 30 12:16:50 crc kubenswrapper[4869]: I0130 12:16:50.318128 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Jan 30 12:16:51 crc kubenswrapper[4869]: I0130 12:16:51.257362 4869 generic.go:334] "Generic (PLEG): container finished" podID="be0a2787-1b64-4aaf-a8c7-e16a2843e6a8" containerID="cb67bf96b90e0088f5ec0c897d9916ea8c6f143b8f35ff492bdf1b76eba2613b" exitCode=0
Jan 30 12:16:51 crc kubenswrapper[4869]: I0130 12:16:51.257411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"be0a2787-1b64-4aaf-a8c7-e16a2843e6a8","Type":"ContainerDied","Data":"cb67bf96b90e0088f5ec0c897d9916ea8c6f143b8f35ff492bdf1b76eba2613b"}
Jan 30 12:16:51 crc kubenswrapper[4869]: I0130 12:16:51.257441 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"be0a2787-1b64-4aaf-a8c7-e16a2843e6a8","Type":"ContainerStarted","Data":"627e018e33d5dffd37bc1f2c7562c142a767b2a7075eb110ce14240a440d74de"}
Jan 30 12:16:52 crc kubenswrapper[4869]: I0130 12:16:52.575834 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Jan 30 12:16:52 crc kubenswrapper[4869]: I0130 12:16:52.594997 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_be0a2787-1b64-4aaf-a8c7-e16a2843e6a8/mariadb-client/0.log"
Jan 30 12:16:52 crc kubenswrapper[4869]: I0130 12:16:52.617937 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Jan 30 12:16:52 crc kubenswrapper[4869]: I0130 12:16:52.624010 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Jan 30 12:16:52 crc kubenswrapper[4869]: I0130 12:16:52.743684 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvgbq\" (UniqueName: \"kubernetes.io/projected/be0a2787-1b64-4aaf-a8c7-e16a2843e6a8-kube-api-access-pvgbq\") pod \"be0a2787-1b64-4aaf-a8c7-e16a2843e6a8\" (UID: \"be0a2787-1b64-4aaf-a8c7-e16a2843e6a8\") "
Jan 30 12:16:52 crc kubenswrapper[4869]: I0130 12:16:52.749425 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be0a2787-1b64-4aaf-a8c7-e16a2843e6a8-kube-api-access-pvgbq" (OuterVolumeSpecName: "kube-api-access-pvgbq") pod "be0a2787-1b64-4aaf-a8c7-e16a2843e6a8" (UID: "be0a2787-1b64-4aaf-a8c7-e16a2843e6a8"). InnerVolumeSpecName "kube-api-access-pvgbq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 12:16:52 crc kubenswrapper[4869]: I0130 12:16:52.845102 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvgbq\" (UniqueName: \"kubernetes.io/projected/be0a2787-1b64-4aaf-a8c7-e16a2843e6a8-kube-api-access-pvgbq\") on node \"crc\" DevicePath \"\""
Jan 30 12:16:53 crc kubenswrapper[4869]: I0130 12:16:53.273645 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="627e018e33d5dffd37bc1f2c7562c142a767b2a7075eb110ce14240a440d74de"
Jan 30 12:16:53 crc kubenswrapper[4869]: I0130 12:16:53.273748 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Jan 30 12:16:54 crc kubenswrapper[4869]: I0130 12:16:54.143465 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be0a2787-1b64-4aaf-a8c7-e16a2843e6a8" path="/var/lib/kubelet/pods/be0a2787-1b64-4aaf-a8c7-e16a2843e6a8/volumes"
Jan 30 12:17:15 crc kubenswrapper[4869]: I0130 12:17:15.948521 4869 scope.go:117] "RemoveContainer" containerID="68b551b931c11eb40df709c47115a0d7e0a186cdbab37c847b08e3b95bd7178d"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.029524 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 30 12:17:33 crc kubenswrapper[4869]: E0130 12:17:33.030449 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be0a2787-1b64-4aaf-a8c7-e16a2843e6a8" containerName="mariadb-client"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.030464 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="be0a2787-1b64-4aaf-a8c7-e16a2843e6a8" containerName="mariadb-client"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.030648 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="be0a2787-1b64-4aaf-a8c7-e16a2843e6a8" containerName="mariadb-client"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.031431 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.038682 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.040315 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-j5mb7"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.040342 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.047247 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.053686 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.055404 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.059704 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.061563 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.102408 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.114403 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182357 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3090f286-0536-4b7a-ae11-f9fff3403717-config\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvkw9\" (UniqueName: \"kubernetes.io/projected/d68922d2-7bd8-45a4-94ca-742713db6ceb-kube-api-access-tvkw9\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182432 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwtqc\" (UniqueName: \"kubernetes.io/projected/3090f286-0536-4b7a-ae11-f9fff3403717-kube-api-access-hwtqc\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182450 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182473 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3090f286-0536-4b7a-ae11-f9fff3403717-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182511 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2vtf\" (UniqueName: \"kubernetes.io/projected/6ca04c49-f111-4cf1-a9aa-5da441c05b28-kube-api-access-c2vtf\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182527 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3090f286-0536-4b7a-ae11-f9fff3403717-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182547 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d68922d2-7bd8-45a4-94ca-742713db6ceb-config\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182561 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ca04c49-f111-4cf1-a9aa-5da441c05b28-config\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182576 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ca04c49-f111-4cf1-a9aa-5da441c05b28-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182593 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182610 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ca04c49-f111-4cf1-a9aa-5da441c05b28-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182626 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d68922d2-7bd8-45a4-94ca-742713db6ceb-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182651 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3090f286-0536-4b7a-ae11-f9fff3403717-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182667 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d68922d2-7bd8-45a4-94ca-742713db6ceb-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182690 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d68922d2-7bd8-45a4-94ca-742713db6ceb-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182741 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.182772 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ca04c49-f111-4cf1-a9aa-5da441c05b28-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.229417 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.230933 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.235082 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-9t8fz"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.236965 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.237465 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.244271 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.245918 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.256852 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.269487 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.271135 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.289063 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296299 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3090f286-0536-4b7a-ae11-f9fff3403717-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296376 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d68922d2-7bd8-45a4-94ca-742713db6ceb-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296428 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d68922d2-7bd8-45a4-94ca-742713db6ceb-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296468 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296518 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296557 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ca04c49-f111-4cf1-a9aa-5da441c05b28-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296707 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3090f286-0536-4b7a-ae11-f9fff3403717-config\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296770 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296811 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvkw9\" (UniqueName: \"kubernetes.io/projected/d68922d2-7bd8-45a4-94ca-742713db6ceb-kube-api-access-tvkw9\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296856 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddc2m\" (UniqueName: \"kubernetes.io/projected/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-kube-api-access-ddc2m\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296897 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwtqc\" (UniqueName: \"kubernetes.io/projected/3090f286-0536-4b7a-ae11-f9fff3403717-kube-api-access-hwtqc\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296935 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.296970 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3090f286-0536-4b7a-ae11-f9fff3403717-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297043 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297112 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297156 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2vtf\" (UniqueName: \"kubernetes.io/projected/6ca04c49-f111-4cf1-a9aa-5da441c05b28-kube-api-access-c2vtf\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297181 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-config\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297214 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3090f286-0536-4b7a-ae11-f9fff3403717-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297260 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ca04c49-f111-4cf1-a9aa-5da441c05b28-config\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297296 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d68922d2-7bd8-45a4-94ca-742713db6ceb-config\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ca04c49-f111-4cf1-a9aa-5da441c05b28-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297359 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297396 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ca04c49-f111-4cf1-a9aa-5da441c05b28-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.297430 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d68922d2-7bd8-45a4-94ca-742713db6ceb-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.298167 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d68922d2-7bd8-45a4-94ca-742713db6ceb-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.299125 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3090f286-0536-4b7a-ae11-f9fff3403717-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.299328 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d68922d2-7bd8-45a4-94ca-742713db6ceb-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.299584 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3090f286-0536-4b7a-ae11-f9fff3403717-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.300469 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ca04c49-f111-4cf1-a9aa-5da441c05b28-config\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.301573 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ca04c49-f111-4cf1-a9aa-5da441c05b28-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.301737 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3090f286-0536-4b7a-ae11-f9fff3403717-config\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.302184 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ca04c49-f111-4cf1-a9aa-5da441c05b28-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.304289 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d68922d2-7bd8-45a4-94ca-742713db6ceb-config\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.305813 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3090f286-0536-4b7a-ae11-f9fff3403717-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.307166 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d68922d2-7bd8-45a4-94ca-742713db6ceb-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.308980 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.309064 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2a0e19da73185ec7f7126dd1728b542bca86be74ee30b338b23657546545eccc/globalmount\"" pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.310477 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ca04c49-f111-4cf1-a9aa-5da441c05b28-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.322606 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.322692 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/82f7da34d9948cbe9b497b07c569a90a4300771b6117a69d5f8083a8efeb405a/globalmount\"" pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.324358 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.324421 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9fc932a72c2221b70b9e0ba6afc85e911a7bbcb0de20c95cf051c02e65ae3855/globalmount\"" pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.327308 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwtqc\" (UniqueName: \"kubernetes.io/projected/3090f286-0536-4b7a-ae11-f9fff3403717-kube-api-access-hwtqc\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.327445 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2vtf\" (UniqueName: \"kubernetes.io/projected/6ca04c49-f111-4cf1-a9aa-5da441c05b28-kube-api-access-c2vtf\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.345547 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvkw9\" (UniqueName: \"kubernetes.io/projected/d68922d2-7bd8-45a4-94ca-742713db6ceb-kube-api-access-tvkw9\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.349727 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.371731 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6bad3d65-b5c2-4b5a-b779-ceffa256eba6\") pod \"ovsdbserver-nb-2\" (UID: \"6ca04c49-f111-4cf1-a9aa-5da441c05b28\") " pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.378023 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d90a458b-3fce-4e5a-9235-6d3badce4b4f\") pod \"ovsdbserver-nb-0\" (UID: \"d68922d2-7bd8-45a4-94ca-742713db6ceb\") " pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.380331 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-845a516d-22ca-4811-9b59-0e71b3d52ee5\") pod \"ovsdbserver-nb-1\" (UID: \"3090f286-0536-4b7a-ae11-f9fff3403717\") " pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399207 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b2cba6-4508-4425-af92-7514674301c4-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399271 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399302 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-config\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399346 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37b2cba6-4508-4425-af92-7514674301c4-config\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399376 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a8c097-912b-4208-bcbf-606581cfba77-config\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399411 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmmm7\" (UniqueName: \"kubernetes.io/projected/85a8c097-912b-4208-bcbf-606581cfba77-kube-api-access-cmmm7\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399444 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85a8c097-912b-4208-bcbf-606581cfba77-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399471 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399532 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37b2cba6-4508-4425-af92-7514674301c4-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399567 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkzfd\" (UniqueName: \"kubernetes.io/projected/37b2cba6-4508-4425-af92-7514674301c4-kube-api-access-hkzfd\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399598 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399649 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399678 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddc2m\" (UniqueName: \"kubernetes.io/projected/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-kube-api-access-ddc2m\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399743 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/85a8c097-912b-4208-bcbf-606581cfba77-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399772 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399793 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37b2cba6-4508-4425-af92-7514674301c4-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399815 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85a8c097-912b-4208-bcbf-606581cfba77-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.399839 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.401194 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.401617 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.401738 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.402890 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-config\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.405155 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.407434 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.418530 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e5d6b886cad3e3206b04b6bb63f3925f3d2ba04a5e435c52c868be68a057ba14/globalmount\"" pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.418976 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.421689 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddc2m\" (UniqueName: \"kubernetes.io/projected/ec554cf4-4bb3-4861-9763-1d754d0f2c2e-kube-api-access-ddc2m\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.458601 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1b7fa5c-a7be-4c64-abab-af0b835a154a\") pod \"ovsdbserver-sb-0\" (UID: \"ec554cf4-4bb3-4861-9763-1d754d0f2c2e\") " pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.505915 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/85a8c097-912b-4208-bcbf-606581cfba77-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506219 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37b2cba6-4508-4425-af92-7514674301c4-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506237 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85a8c097-912b-4208-bcbf-606581cfba77-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506256 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506282 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b2cba6-4508-4425-af92-7514674301c4-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506317 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37b2cba6-4508-4425-af92-7514674301c4-config\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506341 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a8c097-912b-4208-bcbf-606581cfba77-config\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506363 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmmm7\" (UniqueName: \"kubernetes.io/projected/85a8c097-912b-4208-bcbf-606581cfba77-kube-api-access-cmmm7\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506388 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85a8c097-912b-4208-bcbf-606581cfba77-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506411 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37b2cba6-4508-4425-af92-7514674301c4-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506434 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkzfd\" (UniqueName: \"kubernetes.io/projected/37b2cba6-4508-4425-af92-7514674301c4-kube-api-access-hkzfd\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.506456 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.507759 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37b2cba6-4508-4425-af92-7514674301c4-config\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.508180 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85a8c097-912b-4208-bcbf-606581cfba77-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.508461 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/85a8c097-912b-4208-bcbf-606581cfba77-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.508627 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37b2cba6-4508-4425-af92-7514674301c4-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.508735 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a8c097-912b-4208-bcbf-606581cfba77-config\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.509222 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37b2cba6-4508-4425-af92-7514674301c4-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.512988 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.513037 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/11d899ff7fa4cdc17fc80ac3317933ab9e99682399b08fab20632d4e12894654/globalmount\"" pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.514018 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85a8c097-912b-4208-bcbf-606581cfba77-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.524523 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.524585 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5a5a18d08b513b15bca85b12ac27e7e6d06dbd98516bd6222962a2933ec2b403/globalmount\"" pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.525977 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmmm7\" (UniqueName: \"kubernetes.io/projected/85a8c097-912b-4208-bcbf-606581cfba77-kube-api-access-cmmm7\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.527427 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkzfd\" (UniqueName: \"kubernetes.io/projected/37b2cba6-4508-4425-af92-7514674301c4-kube-api-access-hkzfd\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.527988 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b2cba6-4508-4425-af92-7514674301c4-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.551417 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6b3d01c0-5c0f-4b50-ae7f-00987fc5ff46\") pod \"ovsdbserver-sb-1\" (UID: \"85a8c097-912b-4208-bcbf-606581cfba77\") " pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.569381 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.578147 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0b4ec4-5575-426c-be2d-c704e8b2cd3b\") pod \"ovsdbserver-sb-2\" (UID: \"37b2cba6-4508-4425-af92-7514674301c4\") " pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.587801 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:33 crc kubenswrapper[4869]: I0130 12:17:33.598331 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:33.674486 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:33.944951 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Jan 30 12:17:34 crc kubenswrapper[4869]: W0130 12:17:33.956071 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ca04c49_f111_4cf1_a9aa_5da441c05b28.slice/crio-1c9a5c72c3b07ee28a4209074ec07fb09d2bf1a037ed68ff72b23a814e7c9bc4 WatchSource:0}: Error finding container 1c9a5c72c3b07ee28a4209074ec07fb09d2bf1a037ed68ff72b23a814e7c9bc4: Status 404 returned error can't find the container with id 1c9a5c72c3b07ee28a4209074ec07fb09d2bf1a037ed68ff72b23a814e7c9bc4
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.073045 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Jan 30 12:17:34 crc kubenswrapper[4869]: W0130 12:17:34.083064 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3090f286_0536_4b7a_ae11_f9fff3403717.slice/crio-00161b9249d482e9fd34967bf398c79d1d75aa474924f560b3f29c677f16020b WatchSource:0}: Error finding container 00161b9249d482e9fd34967bf398c79d1d75aa474924f560b3f29c677f16020b: Status 404 returned error can't find the container with id 00161b9249d482e9fd34967bf398c79d1d75aa474924f560b3f29c677f16020b
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.591333 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"3090f286-0536-4b7a-ae11-f9fff3403717","Type":"ContainerStarted","Data":"931ddd244b45d503f3cff486efd90c48b57f90d6a327950ab610670f4446e67e"}
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.591747 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"3090f286-0536-4b7a-ae11-f9fff3403717","Type":"ContainerStarted","Data":"fd85fecda8357a5bb3fcd6e105db55ff7ee336a1ebfb4ab95b8436f65f8c7e11"}
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.591762 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"3090f286-0536-4b7a-ae11-f9fff3403717","Type":"ContainerStarted","Data":"00161b9249d482e9fd34967bf398c79d1d75aa474924f560b3f29c677f16020b"}
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.595626 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"6ca04c49-f111-4cf1-a9aa-5da441c05b28","Type":"ContainerStarted","Data":"af78d4143fb495777a0a61148cb835f1545aaae44e9a79493a3db05779863887"}
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.595659 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"6ca04c49-f111-4cf1-a9aa-5da441c05b28","Type":"ContainerStarted","Data":"168c0034c4ac04aa7b312e76fa86c7a807e7205cc95780a7ad5460a7365cae10"}
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.595668 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"6ca04c49-f111-4cf1-a9aa-5da441c05b28","Type":"ContainerStarted","Data":"1c9a5c72c3b07ee28a4209074ec07fb09d2bf1a037ed68ff72b23a814e7c9bc4"}
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.622632 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=2.622603457 podStartE2EDuration="2.622603457s" podCreationTimestamp="2026-01-30 12:17:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:34.610698739 +0000 UTC m=+5005.160574825" watchObservedRunningTime="2026-01-30 12:17:34.622603457 +0000 UTC m=+5005.172479523"
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.638482 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=2.638460067 podStartE2EDuration="2.638460067s" podCreationTimestamp="2026-01-30 12:17:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:34.630425009 +0000 UTC m=+5005.180301075" watchObservedRunningTime="2026-01-30 12:17:34.638460067 +0000 UTC m=+5005.188336133"
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.837690 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 30 12:17:34 crc kubenswrapper[4869]: W0130 12:17:34.838786 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec554cf4_4bb3_4861_9763_1d754d0f2c2e.slice/crio-a9d002fe09f6dc5471669a92b317849c2a96eaa2cee942c2f63554661e8227ec WatchSource:0}: Error finding container a9d002fe09f6dc5471669a92b317849c2a96eaa2cee942c2f63554661e8227ec: Status 404 returned error can't find the container with id a9d002fe09f6dc5471669a92b317849c2a96eaa2cee942c2f63554661e8227ec
Jan 30 12:17:34 crc kubenswrapper[4869]: I0130 12:17:34.934657 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Jan 30 12:17:34 crc kubenswrapper[4869]: W0130 12:17:34.944326 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85a8c097_912b_4208_bcbf_606581cfba77.slice/crio-e9d136c12ffdb1b7298805498739c2bfe70079a5b2ef782468086108e0f030d2 WatchSource:0}: Error finding container e9d136c12ffdb1b7298805498739c2bfe70079a5b2ef782468086108e0f030d2: Status 404 returned error can't find the container with id e9d136c12ffdb1b7298805498739c2bfe70079a5b2ef782468086108e0f030d2
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.381330 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Jan 30 12:17:35 crc kubenswrapper[4869]: W0130 12:17:35.386157 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37b2cba6_4508_4425_af92_7514674301c4.slice/crio-38c6a5f53879fe08703364b5b5a61134ec6421c5fc72434e9df74614e0db5db7 WatchSource:0}: Error finding container 38c6a5f53879fe08703364b5b5a61134ec6421c5fc72434e9df74614e0db5db7: Status 404 returned error can't find the container with id 38c6a5f53879fe08703364b5b5a61134ec6421c5fc72434e9df74614e0db5db7
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.549115 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 30 12:17:35 crc kubenswrapper[4869]: W0130 12:17:35.551270 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd68922d2_7bd8_45a4_94ca_742713db6ceb.slice/crio-8b9e7f2926843815b9dbfdd9ab7600261782ceaf28628fe210ecd0065d00ce4f WatchSource:0}: Error finding container 8b9e7f2926843815b9dbfdd9ab7600261782ceaf28628fe210ecd0065d00ce4f: Status 404 returned error can't find the container with id 8b9e7f2926843815b9dbfdd9ab7600261782ceaf28628fe210ecd0065d00ce4f
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.606050 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"37b2cba6-4508-4425-af92-7514674301c4","Type":"ContainerStarted","Data":"5cd41c9ee6d6ff04faaf38d1ee7695f5859f0e64e88f185c31809d853e06e9dd"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.606105 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"37b2cba6-4508-4425-af92-7514674301c4","Type":"ContainerStarted","Data":"38c6a5f53879fe08703364b5b5a61134ec6421c5fc72434e9df74614e0db5db7"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.609157 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"85a8c097-912b-4208-bcbf-606581cfba77","Type":"ContainerStarted","Data":"930d0ddd6198bd8a783a680e3b22aa7961ae6ebc6ceb1b15a89945ed879f589b"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.609215 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"85a8c097-912b-4208-bcbf-606581cfba77","Type":"ContainerStarted","Data":"65d518117ffa5be7abb0d24ea7b99d9fd2a8e995993ff750ce794f77bd953e5c"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.609229 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"85a8c097-912b-4208-bcbf-606581cfba77","Type":"ContainerStarted","Data":"e9d136c12ffdb1b7298805498739c2bfe70079a5b2ef782468086108e0f030d2"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.611029 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d68922d2-7bd8-45a4-94ca-742713db6ceb","Type":"ContainerStarted","Data":"8b9e7f2926843815b9dbfdd9ab7600261782ceaf28628fe210ecd0065d00ce4f"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.616896 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ec554cf4-4bb3-4861-9763-1d754d0f2c2e","Type":"ContainerStarted","Data":"93635c69676760e4d3def62d4e5e72424f1666c023ec0ed211abdf546465c672"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.616946 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ec554cf4-4bb3-4861-9763-1d754d0f2c2e","Type":"ContainerStarted","Data":"83416814da5d0e133f553be1b4a4e1fa1b3d4ac379b9ee29d81334e2f1fa106e"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.616960 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ec554cf4-4bb3-4861-9763-1d754d0f2c2e","Type":"ContainerStarted","Data":"a9d002fe09f6dc5471669a92b317849c2a96eaa2cee942c2f63554661e8227ec"}
Jan 30 12:17:35 crc kubenswrapper[4869]: I0130 12:17:35.626557 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.626510923 podStartE2EDuration="3.626510923s" podCreationTimestamp="2026-01-30 12:17:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:35.625938397 +0000 UTC m=+5006.175814463" watchObservedRunningTime="2026-01-30 12:17:35.626510923 +0000 UTC m=+5006.176386989"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.402826 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.420231 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.570918 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.588429 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.626929 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"37b2cba6-4508-4425-af92-7514674301c4","Type":"ContainerStarted","Data":"96bb781736484a5c095736b7e6baac35e09fb2020ea9ec8429ddb9ecde22eb7c"}
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.630062 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d68922d2-7bd8-45a4-94ca-742713db6ceb","Type":"ContainerStarted","Data":"2431a84a8a17101674eadc84e4860d4160aac06e67397f87a7a90d1ca44fcde5"}
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.630113 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d68922d2-7bd8-45a4-94ca-742713db6ceb","Type":"ContainerStarted","Data":"b245b5dfadeb097f7fa64588c4820182b1e5c800d29976ad6818cb488270fd98"}
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.644783 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.644757756 podStartE2EDuration="4.644757756s" podCreationTimestamp="2026-01-30 12:17:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:36.642643036 +0000 UTC m=+5007.192519122" watchObservedRunningTime="2026-01-30 12:17:36.644757756 +0000 UTC m=+5007.194633822"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.649424 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.6494055880000005 podStartE2EDuration="4.649405588s" podCreationTimestamp="2026-01-30 12:17:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:35.650635818 +0000 UTC m=+5006.200511894" watchObservedRunningTime="2026-01-30 12:17:36.649405588 +0000 UTC m=+5007.199281654"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.666511 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.666484333 podStartE2EDuration="4.666484333s" podCreationTimestamp="2026-01-30 12:17:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:36.658043083 +0000 UTC m=+5007.207919149" watchObservedRunningTime="2026-01-30 12:17:36.666484333 +0000 UTC m=+5007.216360399"
Jan 30 12:17:36 crc kubenswrapper[4869]: I0130 12:17:36.675005 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:38 crc kubenswrapper[4869]: I0130 12:17:38.402607 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:38 crc kubenswrapper[4869]: I0130 12:17:38.420036 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:38 crc kubenswrapper[4869]: I0130 12:17:38.570966 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:38 crc kubenswrapper[4869]: I0130 12:17:38.588470 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1"
Jan 30 12:17:38 crc kubenswrapper[4869]: I0130 12:17:38.599633 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:38 crc kubenswrapper[4869]: I0130 12:17:38.675063 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.444231 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.460382 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.489009 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.504936 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.599570 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.622572 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.648247 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2"
Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.658373 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started"
pod="openstack/ovsdbserver-sb-1" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.681343 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7977bd746f-ss55l"] Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.683073 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.692683 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.699928 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7977bd746f-ss55l"] Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.825536 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-ovsdbserver-nb\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.826047 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5fjw\" (UniqueName: \"kubernetes.io/projected/611a08d3-d74b-449a-8095-54ae6fe144cb-kube-api-access-x5fjw\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.826349 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-config\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.826495 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-dns-svc\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.896817 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.928195 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-ovsdbserver-nb\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.928265 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5fjw\" (UniqueName: \"kubernetes.io/projected/611a08d3-d74b-449a-8095-54ae6fe144cb-kube-api-access-x5fjw\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.928812 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-config\") pod 
\"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.929251 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-ovsdbserver-nb\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.929512 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-config\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.929602 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-dns-svc\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.930187 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-dns-svc\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:39 crc kubenswrapper[4869]: I0130 12:17:39.947458 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5fjw\" (UniqueName: \"kubernetes.io/projected/611a08d3-d74b-449a-8095-54ae6fe144cb-kube-api-access-x5fjw\") pod \"dnsmasq-dns-7977bd746f-ss55l\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:40 crc kubenswrapper[4869]: I0130 12:17:40.025075 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:40 crc kubenswrapper[4869]: I0130 12:17:40.464031 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7977bd746f-ss55l"] Jan 30 12:17:40 crc kubenswrapper[4869]: W0130 12:17:40.467637 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod611a08d3_d74b_449a_8095_54ae6fe144cb.slice/crio-4def91040dd616412760f5bca2688e59b7b0ece71f792666918394e37607876b WatchSource:0}: Error finding container 4def91040dd616412760f5bca2688e59b7b0ece71f792666918394e37607876b: Status 404 returned error can't find the container with id 4def91040dd616412760f5bca2688e59b7b0ece71f792666918394e37607876b Jan 30 12:17:40 crc kubenswrapper[4869]: I0130 12:17:40.664701 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" event={"ID":"611a08d3-d74b-449a-8095-54ae6fe144cb","Type":"ContainerStarted","Data":"0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2"} Jan 30 12:17:40 crc kubenswrapper[4869]: I0130 12:17:40.665090 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" event={"ID":"611a08d3-d74b-449a-8095-54ae6fe144cb","Type":"ContainerStarted","Data":"4def91040dd616412760f5bca2688e59b7b0ece71f792666918394e37607876b"} Jan 30 12:17:40 crc kubenswrapper[4869]: I0130 12:17:40.709675 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 30 12:17:40 crc kubenswrapper[4869]: I0130 12:17:40.710686 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.108109 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7977bd746f-ss55l"] Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.139331 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-644c8fb5bc-fvfcb"] Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.141040 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.143872 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.147994 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644c8fb5bc-fvfcb"] Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.249989 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-nb\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.250361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-dns-svc\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.250389 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66tc5\" (UniqueName: \"kubernetes.io/projected/661dc461-1fbd-4408-a739-c0b70e0d66bb-kube-api-access-66tc5\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.250414 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-sb\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.250430 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-config\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.352584 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-nb\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.352649 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-dns-svc\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.352676 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66tc5\" (UniqueName: \"kubernetes.io/projected/661dc461-1fbd-4408-a739-c0b70e0d66bb-kube-api-access-66tc5\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " 
pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.352744 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-sb\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.352765 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-config\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.354402 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-config\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.355200 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-dns-svc\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.355442 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-nb\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.355820 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-sb\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.377250 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66tc5\" (UniqueName: \"kubernetes.io/projected/661dc461-1fbd-4408-a739-c0b70e0d66bb-kube-api-access-66tc5\") pod \"dnsmasq-dns-644c8fb5bc-fvfcb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.457767 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.679229 4869 generic.go:334] "Generic (PLEG): container finished" podID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerID="0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2" exitCode=0 Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.679914 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" event={"ID":"611a08d3-d74b-449a-8095-54ae6fe144cb","Type":"ContainerDied","Data":"0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2"} Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.680055 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" event={"ID":"611a08d3-d74b-449a-8095-54ae6fe144cb","Type":"ContainerStarted","Data":"07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004"} Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.680183 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.700239 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" podStartSLOduration=2.700214525 podStartE2EDuration="2.700214525s" podCreationTimestamp="2026-01-30 12:17:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:41.699276668 +0000 UTC m=+5012.249152734" watchObservedRunningTime="2026-01-30 12:17:41.700214525 +0000 UTC m=+5012.250090591" Jan 30 12:17:41 crc kubenswrapper[4869]: I0130 12:17:41.860406 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644c8fb5bc-fvfcb"] Jan 30 12:17:41 crc kubenswrapper[4869]: W0130 12:17:41.870974 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod661dc461_1fbd_4408_a739_c0b70e0d66bb.slice/crio-c852f2bd130da24497d7e55d428a7e5759b2db7a57148274011ed5ab968447e0 WatchSource:0}: Error finding container c852f2bd130da24497d7e55d428a7e5759b2db7a57148274011ed5ab968447e0: Status 404 returned error can't find the container with id c852f2bd130da24497d7e55d428a7e5759b2db7a57148274011ed5ab968447e0 Jan 30 12:17:42 crc kubenswrapper[4869]: I0130 12:17:42.690062 4869 generic.go:334] "Generic (PLEG): container finished" podID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerID="5709ba87e38dbf851bee4108d3f12b5099b2ad44157a3a1e37d90670a644c0bb" exitCode=0 Jan 30 12:17:42 crc kubenswrapper[4869]: I0130 12:17:42.690665 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" podUID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerName="dnsmasq-dns" containerID="cri-o://07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004" gracePeriod=10 Jan 30 12:17:42 crc kubenswrapper[4869]: I0130 12:17:42.690145 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" event={"ID":"661dc461-1fbd-4408-a739-c0b70e0d66bb","Type":"ContainerDied","Data":"5709ba87e38dbf851bee4108d3f12b5099b2ad44157a3a1e37d90670a644c0bb"} Jan 30 12:17:42 crc kubenswrapper[4869]: I0130 12:17:42.690793 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" 
event={"ID":"661dc461-1fbd-4408-a739-c0b70e0d66bb","Type":"ContainerStarted","Data":"c852f2bd130da24497d7e55d428a7e5759b2db7a57148274011ed5ab968447e0"} Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.139792 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.279563 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-dns-svc\") pod \"611a08d3-d74b-449a-8095-54ae6fe144cb\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.280185 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-ovsdbserver-nb\") pod \"611a08d3-d74b-449a-8095-54ae6fe144cb\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.280337 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-config\") pod \"611a08d3-d74b-449a-8095-54ae6fe144cb\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.280456 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5fjw\" (UniqueName: \"kubernetes.io/projected/611a08d3-d74b-449a-8095-54ae6fe144cb-kube-api-access-x5fjw\") pod \"611a08d3-d74b-449a-8095-54ae6fe144cb\" (UID: \"611a08d3-d74b-449a-8095-54ae6fe144cb\") " Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.298943 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/611a08d3-d74b-449a-8095-54ae6fe144cb-kube-api-access-x5fjw" (OuterVolumeSpecName: "kube-api-access-x5fjw") pod "611a08d3-d74b-449a-8095-54ae6fe144cb" (UID: "611a08d3-d74b-449a-8095-54ae6fe144cb"). InnerVolumeSpecName "kube-api-access-x5fjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.320810 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "611a08d3-d74b-449a-8095-54ae6fe144cb" (UID: "611a08d3-d74b-449a-8095-54ae6fe144cb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.322533 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "611a08d3-d74b-449a-8095-54ae6fe144cb" (UID: "611a08d3-d74b-449a-8095-54ae6fe144cb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.323053 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-config" (OuterVolumeSpecName: "config") pod "611a08d3-d74b-449a-8095-54ae6fe144cb" (UID: "611a08d3-d74b-449a-8095-54ae6fe144cb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.383997 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.384216 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.384281 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/611a08d3-d74b-449a-8095-54ae6fe144cb-config\") on node \"crc\" DevicePath \"\"" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.384338 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5fjw\" (UniqueName: \"kubernetes.io/projected/611a08d3-d74b-449a-8095-54ae6fe144cb-kube-api-access-x5fjw\") on node \"crc\" DevicePath \"\"" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.611337 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.641029 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.703950 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" event={"ID":"661dc461-1fbd-4408-a739-c0b70e0d66bb","Type":"ContainerStarted","Data":"ed8d9df2e746e12a06a489338c408cd79c4eb81166a0848d0632ebd192c565cf"} Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.704871 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.707593 4869 generic.go:334] "Generic (PLEG): container finished" podID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerID="07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004" exitCode=0 Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.707634 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" event={"ID":"611a08d3-d74b-449a-8095-54ae6fe144cb","Type":"ContainerDied","Data":"07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004"} Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.707660 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" event={"ID":"611a08d3-d74b-449a-8095-54ae6fe144cb","Type":"ContainerDied","Data":"4def91040dd616412760f5bca2688e59b7b0ece71f792666918394e37607876b"} Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.707678 4869 scope.go:117] "RemoveContainer" containerID="07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.707822 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7977bd746f-ss55l" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.736051 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" podStartSLOduration=2.736022642 podStartE2EDuration="2.736022642s" podCreationTimestamp="2026-01-30 12:17:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:43.722832538 +0000 UTC m=+5014.272708624" watchObservedRunningTime="2026-01-30 12:17:43.736022642 +0000 UTC m=+5014.285898708" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.747230 4869 scope.go:117] "RemoveContainer" containerID="0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.754043 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7977bd746f-ss55l"] Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.765819 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7977bd746f-ss55l"] Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.767435 4869 scope.go:117] "RemoveContainer" containerID="07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004" Jan 30 12:17:43 crc kubenswrapper[4869]: E0130 12:17:43.768132 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004\": container with ID starting with 07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004 not found: ID does not exist" containerID="07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.768187 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004"} err="failed to get container status \"07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004\": rpc error: code = NotFound desc = could not find container \"07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004\": container with ID starting with 07d85b3030c8ece55bb86a45d1c0d58c3bb6c01daf33f036057819b3a3611004 not found: ID does not exist" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.768224 4869 scope.go:117] "RemoveContainer" containerID="0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2" Jan 30 12:17:43 crc kubenswrapper[4869]: E0130 12:17:43.768552 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2\": container with ID starting with 0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2 not found: ID does not exist" containerID="0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2" Jan 30 12:17:43 crc kubenswrapper[4869]: I0130 12:17:43.768583 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2"} err="failed to get container status \"0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2\": rpc error: code = NotFound desc = could not find container \"0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2\": container with ID starting with 
0da3803d7cf83ebea8cb80b2098c03752d02d808fa131e6d625195d15df0a9c2 not found: ID does not exist" Jan 30 12:17:44 crc kubenswrapper[4869]: I0130 12:17:44.142686 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="611a08d3-d74b-449a-8095-54ae6fe144cb" path="/var/lib/kubelet/pods/611a08d3-d74b-449a-8095-54ae6fe144cb/volumes" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.370260 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Jan 30 12:17:46 crc kubenswrapper[4869]: E0130 12:17:46.370678 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerName="dnsmasq-dns" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.370697 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerName="dnsmasq-dns" Jan 30 12:17:46 crc kubenswrapper[4869]: E0130 12:17:46.370747 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerName="init" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.370756 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerName="init" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.371026 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="611a08d3-d74b-449a-8095-54ae6fe144cb" containerName="dnsmasq-dns" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.371776 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.376923 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.377275 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.533960 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.534083 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bpwr\" (UniqueName: \"kubernetes.io/projected/32f585a7-6cc0-4ea3-aad6-1fd82d3a0358-kube-api-access-7bpwr\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.534109 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/32f585a7-6cc0-4ea3-aad6-1fd82d3a0358-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.635288 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bpwr\" (UniqueName: \"kubernetes.io/projected/32f585a7-6cc0-4ea3-aad6-1fd82d3a0358-kube-api-access-7bpwr\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.635356 
4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/32f585a7-6cc0-4ea3-aad6-1fd82d3a0358-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.635490 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.639413 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.639455 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/207be0ba58c4b67c86aa6f7f1a8c98028181ac99bd4440053e6d26e31dec4be2/globalmount\"" pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.641200 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/32f585a7-6cc0-4ea3-aad6-1fd82d3a0358-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.654476 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bpwr\" (UniqueName: \"kubernetes.io/projected/32f585a7-6cc0-4ea3-aad6-1fd82d3a0358-kube-api-access-7bpwr\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.662818 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2af0e1b6-3e16-479d-9863-9a38beb21ded\") pod \"ovn-copy-data\" (UID: \"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358\") " pod="openstack/ovn-copy-data" Jan 30 12:17:46 crc kubenswrapper[4869]: I0130 12:17:46.693026 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Jan 30 12:17:47 crc kubenswrapper[4869]: I0130 12:17:47.173537 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Jan 30 12:17:47 crc kubenswrapper[4869]: W0130 12:17:47.179585 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32f585a7_6cc0_4ea3_aad6_1fd82d3a0358.slice/crio-b97ce1f9715585b47798f998329ef0b5d00450358624c74ce71376757e6abc73 WatchSource:0}: Error finding container b97ce1f9715585b47798f998329ef0b5d00450358624c74ce71376757e6abc73: Status 404 returned error can't find the container with id b97ce1f9715585b47798f998329ef0b5d00450358624c74ce71376757e6abc73 Jan 30 12:17:47 crc kubenswrapper[4869]: I0130 12:17:47.761641 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358","Type":"ContainerStarted","Data":"892a6ccc0cdd3700b868ebd7a0795428fa036ab46a08a34b6ef48f5140030cb8"} Jan 30 12:17:47 crc kubenswrapper[4869]: I0130 12:17:47.762073 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"32f585a7-6cc0-4ea3-aad6-1fd82d3a0358","Type":"ContainerStarted","Data":"b97ce1f9715585b47798f998329ef0b5d00450358624c74ce71376757e6abc73"} Jan 30 12:17:47 crc kubenswrapper[4869]: I0130 12:17:47.786054 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=2.78602788 podStartE2EDuration="2.78602788s" podCreationTimestamp="2026-01-30 12:17:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:47.777319963 +0000 UTC m=+5018.327196049" watchObservedRunningTime="2026-01-30 12:17:47.78602788 +0000 UTC m=+5018.335903946" Jan 30 12:17:51 crc kubenswrapper[4869]: I0130 12:17:51.458925 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:17:51 crc kubenswrapper[4869]: I0130 12:17:51.511138 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-jzdnb"] Jan 30 12:17:51 crc kubenswrapper[4869]: I0130 12:17:51.769049 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:17:51 crc kubenswrapper[4869]: I0130 12:17:51.769491 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:17:51 crc kubenswrapper[4869]: I0130 12:17:51.787613 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" podUID="140c446e-0248-45f5-9bc8-d09918522fdb" containerName="dnsmasq-dns" containerID="cri-o://091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84" gracePeriod=10 Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.238431 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.335862 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-config\") pod \"140c446e-0248-45f5-9bc8-d09918522fdb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.335977 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clzlt\" (UniqueName: \"kubernetes.io/projected/140c446e-0248-45f5-9bc8-d09918522fdb-kube-api-access-clzlt\") pod \"140c446e-0248-45f5-9bc8-d09918522fdb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.336134 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-dns-svc\") pod \"140c446e-0248-45f5-9bc8-d09918522fdb\" (UID: \"140c446e-0248-45f5-9bc8-d09918522fdb\") " Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.343055 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/140c446e-0248-45f5-9bc8-d09918522fdb-kube-api-access-clzlt" (OuterVolumeSpecName: "kube-api-access-clzlt") pod "140c446e-0248-45f5-9bc8-d09918522fdb" (UID: "140c446e-0248-45f5-9bc8-d09918522fdb"). InnerVolumeSpecName "kube-api-access-clzlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.382097 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-config" (OuterVolumeSpecName: "config") pod "140c446e-0248-45f5-9bc8-d09918522fdb" (UID: "140c446e-0248-45f5-9bc8-d09918522fdb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.384945 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "140c446e-0248-45f5-9bc8-d09918522fdb" (UID: "140c446e-0248-45f5-9bc8-d09918522fdb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.438803 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.438847 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/140c446e-0248-45f5-9bc8-d09918522fdb-config\") on node \"crc\" DevicePath \"\"" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.438866 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clzlt\" (UniqueName: \"kubernetes.io/projected/140c446e-0248-45f5-9bc8-d09918522fdb-kube-api-access-clzlt\") on node \"crc\" DevicePath \"\"" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.796508 4869 generic.go:334] "Generic (PLEG): container finished" podID="140c446e-0248-45f5-9bc8-d09918522fdb" containerID="091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84" exitCode=0 Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.796557 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" event={"ID":"140c446e-0248-45f5-9bc8-d09918522fdb","Type":"ContainerDied","Data":"091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84"} Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.796598 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" event={"ID":"140c446e-0248-45f5-9bc8-d09918522fdb","Type":"ContainerDied","Data":"da0ce924857998b2923138c0820c57c3ff26942f3de242d37ab51c00d782ab4e"} Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.796620 4869 scope.go:117] "RemoveContainer" containerID="091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.796621 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-jzdnb" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.818047 4869 scope.go:117] "RemoveContainer" containerID="6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.831957 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-jzdnb"] Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.839043 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-jzdnb"] Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.850144 4869 scope.go:117] "RemoveContainer" containerID="091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84" Jan 30 12:17:52 crc kubenswrapper[4869]: E0130 12:17:52.850837 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84\": container with ID starting with 091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84 not found: ID does not exist" containerID="091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.850898 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84"} err="failed to get container status \"091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84\": rpc error: code = NotFound desc = could not find container \"091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84\": container with ID starting with 091dbbe029d970b8542689d13d35d6397858626ab2c4d213c9bba11748c3cf84 not found: ID does not exist" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.850939 4869 scope.go:117] "RemoveContainer" containerID="6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd" Jan 30 12:17:52 crc kubenswrapper[4869]: E0130 12:17:52.851474 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd\": container with ID starting with 6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd not found: ID does not exist" containerID="6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd" Jan 30 12:17:52 crc kubenswrapper[4869]: I0130 12:17:52.851511 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd"} err="failed to get container status \"6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd\": rpc error: code = NotFound desc = could not find container \"6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd\": container with ID starting with 6083312d796fb1a9afa7b33634c9703f7a823ccd00012cab1592fdcba52a25fd not found: ID does not exist" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.273350 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 30 12:17:53 crc kubenswrapper[4869]: E0130 12:17:53.273794 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140c446e-0248-45f5-9bc8-d09918522fdb" containerName="init" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.273818 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="140c446e-0248-45f5-9bc8-d09918522fdb" 
containerName="init" Jan 30 12:17:53 crc kubenswrapper[4869]: E0130 12:17:53.273844 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140c446e-0248-45f5-9bc8-d09918522fdb" containerName="dnsmasq-dns" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.273853 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="140c446e-0248-45f5-9bc8-d09918522fdb" containerName="dnsmasq-dns" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.274059 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="140c446e-0248-45f5-9bc8-d09918522fdb" containerName="dnsmasq-dns" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.275345 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.289800 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.290009 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.290105 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-sc9zl" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.297762 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.354399 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dcb5f54-fb8c-453a-a320-a6504f9fa441-config\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.354458 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1dcb5f54-fb8c-453a-a320-a6504f9fa441-scripts\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.354539 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1dcb5f54-fb8c-453a-a320-a6504f9fa441-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.354622 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dcb5f54-fb8c-453a-a320-a6504f9fa441-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.354660 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b78sn\" (UniqueName: \"kubernetes.io/projected/1dcb5f54-fb8c-453a-a320-a6504f9fa441-kube-api-access-b78sn\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.456755 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1dcb5f54-fb8c-453a-a320-a6504f9fa441-ovn-rundir\") pod 
\"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.456858 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dcb5f54-fb8c-453a-a320-a6504f9fa441-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.456883 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b78sn\" (UniqueName: \"kubernetes.io/projected/1dcb5f54-fb8c-453a-a320-a6504f9fa441-kube-api-access-b78sn\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.456961 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dcb5f54-fb8c-453a-a320-a6504f9fa441-config\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.456996 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1dcb5f54-fb8c-453a-a320-a6504f9fa441-scripts\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.457326 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1dcb5f54-fb8c-453a-a320-a6504f9fa441-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.458132 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dcb5f54-fb8c-453a-a320-a6504f9fa441-config\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.458179 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1dcb5f54-fb8c-453a-a320-a6504f9fa441-scripts\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.462837 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dcb5f54-fb8c-453a-a320-a6504f9fa441-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.476584 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b78sn\" (UniqueName: \"kubernetes.io/projected/1dcb5f54-fb8c-453a-a320-a6504f9fa441-kube-api-access-b78sn\") pod \"ovn-northd-0\" (UID: \"1dcb5f54-fb8c-453a-a320-a6504f9fa441\") " pod="openstack/ovn-northd-0" Jan 30 12:17:53 crc kubenswrapper[4869]: I0130 12:17:53.643064 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 30 12:17:54 crc kubenswrapper[4869]: I0130 12:17:54.142346 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="140c446e-0248-45f5-9bc8-d09918522fdb" path="/var/lib/kubelet/pods/140c446e-0248-45f5-9bc8-d09918522fdb/volumes" Jan 30 12:17:54 crc kubenswrapper[4869]: I0130 12:17:54.199981 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 30 12:17:54 crc kubenswrapper[4869]: W0130 12:17:54.208511 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1dcb5f54_fb8c_453a_a320_a6504f9fa441.slice/crio-2810485c5de1efc688470fe537c75a5ecd893f6c5c13c73a3780958f499321e8 WatchSource:0}: Error finding container 2810485c5de1efc688470fe537c75a5ecd893f6c5c13c73a3780958f499321e8: Status 404 returned error can't find the container with id 2810485c5de1efc688470fe537c75a5ecd893f6c5c13c73a3780958f499321e8 Jan 30 12:17:54 crc kubenswrapper[4869]: I0130 12:17:54.816694 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1dcb5f54-fb8c-453a-a320-a6504f9fa441","Type":"ContainerStarted","Data":"a2ae927962be161e001717a7bd98872962858de624fb180dcbd2fd0c2140c106"} Jan 30 12:17:54 crc kubenswrapper[4869]: I0130 12:17:54.817258 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 30 12:17:54 crc kubenswrapper[4869]: I0130 12:17:54.817273 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1dcb5f54-fb8c-453a-a320-a6504f9fa441","Type":"ContainerStarted","Data":"3cf864ef450cd4d0477b70c6e3c9474fa9f24c0f8052abd6d0ad52eb43d2662a"} Jan 30 12:17:54 crc kubenswrapper[4869]: I0130 12:17:54.817287 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1dcb5f54-fb8c-453a-a320-a6504f9fa441","Type":"ContainerStarted","Data":"2810485c5de1efc688470fe537c75a5ecd893f6c5c13c73a3780958f499321e8"} Jan 30 12:17:54 crc kubenswrapper[4869]: I0130 12:17:54.839837 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.839817542 podStartE2EDuration="1.839817542s" podCreationTimestamp="2026-01-30 12:17:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:17:54.834910173 +0000 UTC m=+5025.384786259" watchObservedRunningTime="2026-01-30 12:17:54.839817542 +0000 UTC m=+5025.389693608" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.381951 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-skkxw"] Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.383694 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.403121 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-skkxw"] Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.490680 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9a6e-account-create-update-fsr8d"] Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.498011 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.503573 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.508308 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9a6e-account-create-update-fsr8d"] Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.551942 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgpgt\" (UniqueName: \"kubernetes.io/projected/9bf3ebb4-ddb0-4221-a918-11657d547507-kube-api-access-mgpgt\") pod \"keystone-db-create-skkxw\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.552098 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9bf3ebb4-ddb0-4221-a918-11657d547507-operator-scripts\") pod \"keystone-db-create-skkxw\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.653927 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5db9e4f5-3a4b-4021-b569-db288f1501f0-operator-scripts\") pod \"keystone-9a6e-account-create-update-fsr8d\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.654002 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgpgt\" (UniqueName: \"kubernetes.io/projected/9bf3ebb4-ddb0-4221-a918-11657d547507-kube-api-access-mgpgt\") pod \"keystone-db-create-skkxw\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.654071 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8zsw\" (UniqueName: \"kubernetes.io/projected/5db9e4f5-3a4b-4021-b569-db288f1501f0-kube-api-access-s8zsw\") pod \"keystone-9a6e-account-create-update-fsr8d\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.654097 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9bf3ebb4-ddb0-4221-a918-11657d547507-operator-scripts\") pod \"keystone-db-create-skkxw\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.654959 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9bf3ebb4-ddb0-4221-a918-11657d547507-operator-scripts\") pod \"keystone-db-create-skkxw\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.682587 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgpgt\" (UniqueName: \"kubernetes.io/projected/9bf3ebb4-ddb0-4221-a918-11657d547507-kube-api-access-mgpgt\") pod 
\"keystone-db-create-skkxw\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.708372 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-skkxw" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.755162 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8zsw\" (UniqueName: \"kubernetes.io/projected/5db9e4f5-3a4b-4021-b569-db288f1501f0-kube-api-access-s8zsw\") pod \"keystone-9a6e-account-create-update-fsr8d\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.755623 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5db9e4f5-3a4b-4021-b569-db288f1501f0-operator-scripts\") pod \"keystone-9a6e-account-create-update-fsr8d\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.756329 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5db9e4f5-3a4b-4021-b569-db288f1501f0-operator-scripts\") pod \"keystone-9a6e-account-create-update-fsr8d\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.776371 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8zsw\" (UniqueName: \"kubernetes.io/projected/5db9e4f5-3a4b-4021-b569-db288f1501f0-kube-api-access-s8zsw\") pod \"keystone-9a6e-account-create-update-fsr8d\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:58 crc kubenswrapper[4869]: I0130 12:17:58.819262 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:17:59 crc kubenswrapper[4869]: W0130 12:17:59.158013 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bf3ebb4_ddb0_4221_a918_11657d547507.slice/crio-f698498f6c33e4d47e9f95f9f7e115413f378529d38c97c5d403efd725bf2150 WatchSource:0}: Error finding container f698498f6c33e4d47e9f95f9f7e115413f378529d38c97c5d403efd725bf2150: Status 404 returned error can't find the container with id f698498f6c33e4d47e9f95f9f7e115413f378529d38c97c5d403efd725bf2150 Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.162885 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-skkxw"] Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.270605 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9a6e-account-create-update-fsr8d"] Jan 30 12:17:59 crc kubenswrapper[4869]: W0130 12:17:59.272233 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5db9e4f5_3a4b_4021_b569_db288f1501f0.slice/crio-1bfa887d857e030f1fe1edd7e679cbfafac68086d64530e12bce16201f0b2d10 WatchSource:0}: Error finding container 1bfa887d857e030f1fe1edd7e679cbfafac68086d64530e12bce16201f0b2d10: Status 404 returned error can't find the container with id 1bfa887d857e030f1fe1edd7e679cbfafac68086d64530e12bce16201f0b2d10 Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.853453 4869 generic.go:334] "Generic (PLEG): container finished" podID="9bf3ebb4-ddb0-4221-a918-11657d547507" containerID="7de40206275454617b976728062ff3694a4e99042e112e32f5b9ddf526ea43dd" exitCode=0 Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.853525 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-skkxw" event={"ID":"9bf3ebb4-ddb0-4221-a918-11657d547507","Type":"ContainerDied","Data":"7de40206275454617b976728062ff3694a4e99042e112e32f5b9ddf526ea43dd"} Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.853561 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-skkxw" event={"ID":"9bf3ebb4-ddb0-4221-a918-11657d547507","Type":"ContainerStarted","Data":"f698498f6c33e4d47e9f95f9f7e115413f378529d38c97c5d403efd725bf2150"} Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.855437 4869 generic.go:334] "Generic (PLEG): container finished" podID="5db9e4f5-3a4b-4021-b569-db288f1501f0" containerID="83f6a94b8e1c0a32362066114f2fae94a1bfa36287e2ab31e30749a064128142" exitCode=0 Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.855466 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9a6e-account-create-update-fsr8d" event={"ID":"5db9e4f5-3a4b-4021-b569-db288f1501f0","Type":"ContainerDied","Data":"83f6a94b8e1c0a32362066114f2fae94a1bfa36287e2ab31e30749a064128142"} Jan 30 12:17:59 crc kubenswrapper[4869]: I0130 12:17:59.855485 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9a6e-account-create-update-fsr8d" event={"ID":"5db9e4f5-3a4b-4021-b569-db288f1501f0","Type":"ContainerStarted","Data":"1bfa887d857e030f1fe1edd7e679cbfafac68086d64530e12bce16201f0b2d10"} Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.298313 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-skkxw" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.311377 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.406802 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgpgt\" (UniqueName: \"kubernetes.io/projected/9bf3ebb4-ddb0-4221-a918-11657d547507-kube-api-access-mgpgt\") pod \"9bf3ebb4-ddb0-4221-a918-11657d547507\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.406980 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8zsw\" (UniqueName: \"kubernetes.io/projected/5db9e4f5-3a4b-4021-b569-db288f1501f0-kube-api-access-s8zsw\") pod \"5db9e4f5-3a4b-4021-b569-db288f1501f0\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.407030 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5db9e4f5-3a4b-4021-b569-db288f1501f0-operator-scripts\") pod \"5db9e4f5-3a4b-4021-b569-db288f1501f0\" (UID: \"5db9e4f5-3a4b-4021-b569-db288f1501f0\") " Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.407139 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9bf3ebb4-ddb0-4221-a918-11657d547507-operator-scripts\") pod \"9bf3ebb4-ddb0-4221-a918-11657d547507\" (UID: \"9bf3ebb4-ddb0-4221-a918-11657d547507\") " Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.408105 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bf3ebb4-ddb0-4221-a918-11657d547507-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9bf3ebb4-ddb0-4221-a918-11657d547507" (UID: "9bf3ebb4-ddb0-4221-a918-11657d547507"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.408133 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5db9e4f5-3a4b-4021-b569-db288f1501f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5db9e4f5-3a4b-4021-b569-db288f1501f0" (UID: "5db9e4f5-3a4b-4021-b569-db288f1501f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.408939 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5db9e4f5-3a4b-4021-b569-db288f1501f0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.408978 4869 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9bf3ebb4-ddb0-4221-a918-11657d547507-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.413154 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bf3ebb4-ddb0-4221-a918-11657d547507-kube-api-access-mgpgt" (OuterVolumeSpecName: "kube-api-access-mgpgt") pod "9bf3ebb4-ddb0-4221-a918-11657d547507" (UID: "9bf3ebb4-ddb0-4221-a918-11657d547507"). 
InnerVolumeSpecName "kube-api-access-mgpgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.413313 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5db9e4f5-3a4b-4021-b569-db288f1501f0-kube-api-access-s8zsw" (OuterVolumeSpecName: "kube-api-access-s8zsw") pod "5db9e4f5-3a4b-4021-b569-db288f1501f0" (UID: "5db9e4f5-3a4b-4021-b569-db288f1501f0"). InnerVolumeSpecName "kube-api-access-s8zsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.510515 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgpgt\" (UniqueName: \"kubernetes.io/projected/9bf3ebb4-ddb0-4221-a918-11657d547507-kube-api-access-mgpgt\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.510896 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8zsw\" (UniqueName: \"kubernetes.io/projected/5db9e4f5-3a4b-4021-b569-db288f1501f0-kube-api-access-s8zsw\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.871049 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9a6e-account-create-update-fsr8d" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.871071 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9a6e-account-create-update-fsr8d" event={"ID":"5db9e4f5-3a4b-4021-b569-db288f1501f0","Type":"ContainerDied","Data":"1bfa887d857e030f1fe1edd7e679cbfafac68086d64530e12bce16201f0b2d10"} Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.871117 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bfa887d857e030f1fe1edd7e679cbfafac68086d64530e12bce16201f0b2d10" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.872548 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-skkxw" event={"ID":"9bf3ebb4-ddb0-4221-a918-11657d547507","Type":"ContainerDied","Data":"f698498f6c33e4d47e9f95f9f7e115413f378529d38c97c5d403efd725bf2150"} Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.872584 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f698498f6c33e4d47e9f95f9f7e115413f378529d38c97c5d403efd725bf2150" Jan 30 12:18:01 crc kubenswrapper[4869]: I0130 12:18:01.872631 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-skkxw" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.010834 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-sh22r"] Jan 30 12:18:04 crc kubenswrapper[4869]: E0130 12:18:04.011261 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5db9e4f5-3a4b-4021-b569-db288f1501f0" containerName="mariadb-account-create-update" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.011278 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5db9e4f5-3a4b-4021-b569-db288f1501f0" containerName="mariadb-account-create-update" Jan 30 12:18:04 crc kubenswrapper[4869]: E0130 12:18:04.011295 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bf3ebb4-ddb0-4221-a918-11657d547507" containerName="mariadb-database-create" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.011301 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bf3ebb4-ddb0-4221-a918-11657d547507" containerName="mariadb-database-create" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.011477 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5db9e4f5-3a4b-4021-b569-db288f1501f0" containerName="mariadb-account-create-update" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.011487 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bf3ebb4-ddb0-4221-a918-11657d547507" containerName="mariadb-database-create" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.012223 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.014750 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qkgf7" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.015011 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.015159 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.015872 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.032565 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-sh22r"] Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.158994 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqxsc\" (UniqueName: \"kubernetes.io/projected/9b06a4bb-d363-4877-a91a-e42d56568285-kube-api-access-gqxsc\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.159098 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-config-data\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.159352 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-combined-ca-bundle\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.262009 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-combined-ca-bundle\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.262241 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqxsc\" (UniqueName: \"kubernetes.io/projected/9b06a4bb-d363-4877-a91a-e42d56568285-kube-api-access-gqxsc\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.262310 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-config-data\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.268629 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-config-data\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.274608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-combined-ca-bundle\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.280590 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqxsc\" (UniqueName: \"kubernetes.io/projected/9b06a4bb-d363-4877-a91a-e42d56568285-kube-api-access-gqxsc\") pod \"keystone-db-sync-sh22r\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.331639 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.808549 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-sh22r"] Jan 30 12:18:04 crc kubenswrapper[4869]: I0130 12:18:04.899279 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sh22r" event={"ID":"9b06a4bb-d363-4877-a91a-e42d56568285","Type":"ContainerStarted","Data":"fc6e6923135c7cac192eca0ba3966c028e7f7f5660ae15216f70930e321b6fd3"} Jan 30 12:18:05 crc kubenswrapper[4869]: I0130 12:18:05.914950 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sh22r" event={"ID":"9b06a4bb-d363-4877-a91a-e42d56568285","Type":"ContainerStarted","Data":"ba325cca243ce97470dad2e5d5da64e22b7ee6482551b1062176cd4a5c1c5cae"} Jan 30 12:18:05 crc kubenswrapper[4869]: I0130 12:18:05.939035 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-sh22r" podStartSLOduration=2.939009394 podStartE2EDuration="2.939009394s" podCreationTimestamp="2026-01-30 12:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:18:05.932684664 +0000 UTC m=+5036.482560730" watchObservedRunningTime="2026-01-30 12:18:05.939009394 +0000 UTC m=+5036.488885460" Jan 30 12:18:06 crc kubenswrapper[4869]: I0130 12:18:06.924281 4869 generic.go:334] "Generic (PLEG): container finished" podID="9b06a4bb-d363-4877-a91a-e42d56568285" containerID="ba325cca243ce97470dad2e5d5da64e22b7ee6482551b1062176cd4a5c1c5cae" exitCode=0 Jan 30 12:18:06 crc kubenswrapper[4869]: I0130 12:18:06.924404 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sh22r" event={"ID":"9b06a4bb-d363-4877-a91a-e42d56568285","Type":"ContainerDied","Data":"ba325cca243ce97470dad2e5d5da64e22b7ee6482551b1062176cd4a5c1c5cae"} Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.312779 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.434825 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-combined-ca-bundle\") pod \"9b06a4bb-d363-4877-a91a-e42d56568285\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.435073 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqxsc\" (UniqueName: \"kubernetes.io/projected/9b06a4bb-d363-4877-a91a-e42d56568285-kube-api-access-gqxsc\") pod \"9b06a4bb-d363-4877-a91a-e42d56568285\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.435139 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-config-data\") pod \"9b06a4bb-d363-4877-a91a-e42d56568285\" (UID: \"9b06a4bb-d363-4877-a91a-e42d56568285\") " Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.441067 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b06a4bb-d363-4877-a91a-e42d56568285-kube-api-access-gqxsc" (OuterVolumeSpecName: "kube-api-access-gqxsc") pod "9b06a4bb-d363-4877-a91a-e42d56568285" (UID: "9b06a4bb-d363-4877-a91a-e42d56568285"). InnerVolumeSpecName "kube-api-access-gqxsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.458272 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b06a4bb-d363-4877-a91a-e42d56568285" (UID: "9b06a4bb-d363-4877-a91a-e42d56568285"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.488474 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-config-data" (OuterVolumeSpecName: "config-data") pod "9b06a4bb-d363-4877-a91a-e42d56568285" (UID: "9b06a4bb-d363-4877-a91a-e42d56568285"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.537840 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqxsc\" (UniqueName: \"kubernetes.io/projected/9b06a4bb-d363-4877-a91a-e42d56568285-kube-api-access-gqxsc\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.537894 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.537904 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b06a4bb-d363-4877-a91a-e42d56568285-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.942222 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sh22r" event={"ID":"9b06a4bb-d363-4877-a91a-e42d56568285","Type":"ContainerDied","Data":"fc6e6923135c7cac192eca0ba3966c028e7f7f5660ae15216f70930e321b6fd3"} Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.942280 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc6e6923135c7cac192eca0ba3966c028e7f7f5660ae15216f70930e321b6fd3" Jan 30 12:18:08 crc kubenswrapper[4869]: I0130 12:18:08.942315 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sh22r" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.173435 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b648cdf7-q9tdl"] Jan 30 12:18:09 crc kubenswrapper[4869]: E0130 12:18:09.173781 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b06a4bb-d363-4877-a91a-e42d56568285" containerName="keystone-db-sync" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.173799 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b06a4bb-d363-4877-a91a-e42d56568285" containerName="keystone-db-sync" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.173944 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b06a4bb-d363-4877-a91a-e42d56568285" containerName="keystone-db-sync" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.174791 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.200868 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b648cdf7-q9tdl"] Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.232318 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tqmvq"] Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.233513 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.235610 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.235882 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.236329 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.236481 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.237463 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qkgf7" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.250244 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-ovsdbserver-nb\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.250327 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfctn\" (UniqueName: \"kubernetes.io/projected/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-kube-api-access-sfctn\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.250353 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-ovsdbserver-sb\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.250393 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-dns-svc\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.250459 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-config\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.252755 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tqmvq"] Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.352417 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-combined-ca-bundle\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.352490 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-scripts\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.352531 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-dns-svc\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.352669 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-config-data\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.352923 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg464\" (UniqueName: \"kubernetes.io/projected/ca9bde57-4216-42b4-bac5-8c614648bac0-kube-api-access-zg464\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.353011 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-fernet-keys\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.353113 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-config\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.353328 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-ovsdbserver-nb\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.353440 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-credential-keys\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.353486 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfctn\" (UniqueName: \"kubernetes.io/projected/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-kube-api-access-sfctn\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.353524 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-ovsdbserver-sb\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.353679 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-dns-svc\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.354476 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-config\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.354667 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-ovsdbserver-sb\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.354827 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-ovsdbserver-nb\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.373526 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfctn\" (UniqueName: \"kubernetes.io/projected/b8fb3786-2ed0-4f33-8528-8c86b8a69c87-kube-api-access-sfctn\") pod \"dnsmasq-dns-67b648cdf7-q9tdl\" (UID: \"b8fb3786-2ed0-4f33-8528-8c86b8a69c87\") " pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.454847 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-credential-keys\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.455303 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-combined-ca-bundle\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.455331 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-scripts\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.455378 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-config-data\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.455423 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg464\" (UniqueName: \"kubernetes.io/projected/ca9bde57-4216-42b4-bac5-8c614648bac0-kube-api-access-zg464\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.455453 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-fernet-keys\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.459154 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-combined-ca-bundle\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.460523 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-scripts\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.469612 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-fernet-keys\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.470994 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-config-data\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.476196 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-credential-keys\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.476688 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg464\" (UniqueName: \"kubernetes.io/projected/ca9bde57-4216-42b4-bac5-8c614648bac0-kube-api-access-zg464\") pod \"keystone-bootstrap-tqmvq\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.498760 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:09 crc kubenswrapper[4869]: I0130 12:18:09.562850 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.036156 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b648cdf7-q9tdl"] Jan 30 12:18:10 crc kubenswrapper[4869]: W0130 12:18:10.040197 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8fb3786_2ed0_4f33_8528_8c86b8a69c87.slice/crio-6973e7b0e7ca982e2136c97ce7737b53e0fc3030b45f92b2aa8799445340bb6e WatchSource:0}: Error finding container 6973e7b0e7ca982e2136c97ce7737b53e0fc3030b45f92b2aa8799445340bb6e: Status 404 returned error can't find the container with id 6973e7b0e7ca982e2136c97ce7737b53e0fc3030b45f92b2aa8799445340bb6e Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.158244 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tqmvq"] Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.164378 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.958373 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tqmvq" event={"ID":"ca9bde57-4216-42b4-bac5-8c614648bac0","Type":"ContainerStarted","Data":"bdabdc64213bb2dbb01c863ab4ef1f268441f2371549453363593e4f4b1cd2c8"} Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.958782 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tqmvq" event={"ID":"ca9bde57-4216-42b4-bac5-8c614648bac0","Type":"ContainerStarted","Data":"60b18d9673ab38d8ce8dfc9cb78bc80db92e71601e7a4635d3df0ce9345e4e21"} Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.961607 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8fb3786-2ed0-4f33-8528-8c86b8a69c87" containerID="e233c0c32e7da08356ea9524420f3206f6dabfe7f013028ca20b51894ee29796" exitCode=0 Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.961649 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" event={"ID":"b8fb3786-2ed0-4f33-8528-8c86b8a69c87","Type":"ContainerDied","Data":"e233c0c32e7da08356ea9524420f3206f6dabfe7f013028ca20b51894ee29796"} Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.961672 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" event={"ID":"b8fb3786-2ed0-4f33-8528-8c86b8a69c87","Type":"ContainerStarted","Data":"6973e7b0e7ca982e2136c97ce7737b53e0fc3030b45f92b2aa8799445340bb6e"} Jan 30 12:18:10 crc kubenswrapper[4869]: I0130 12:18:10.990777 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tqmvq" podStartSLOduration=1.9907525879999999 podStartE2EDuration="1.990752588s" podCreationTimestamp="2026-01-30 12:18:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:18:10.978208102 +0000 UTC m=+5041.528084168" watchObservedRunningTime="2026-01-30 12:18:10.990752588 +0000 UTC m=+5041.540628654" Jan 30 12:18:11 crc kubenswrapper[4869]: I0130 12:18:11.972041 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" event={"ID":"b8fb3786-2ed0-4f33-8528-8c86b8a69c87","Type":"ContainerStarted","Data":"6eeec546a34750aa1eba5eaa3cee89fe398af01af7a361d421b8f45c4b6bd350"} Jan 30 12:18:11 crc kubenswrapper[4869]: I0130 12:18:11.972748 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:13 crc kubenswrapper[4869]: I0130 12:18:13.711110 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 30 12:18:13 crc kubenswrapper[4869]: I0130 12:18:13.736854 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" podStartSLOduration=4.736828398 podStartE2EDuration="4.736828398s" podCreationTimestamp="2026-01-30 12:18:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:18:12.01388102 +0000 UTC m=+5042.563757106" watchObservedRunningTime="2026-01-30 12:18:13.736828398 +0000 UTC m=+5044.286704464" Jan 30 12:18:13 crc kubenswrapper[4869]: I0130 12:18:13.991612 4869 generic.go:334] "Generic (PLEG): container finished" podID="ca9bde57-4216-42b4-bac5-8c614648bac0" containerID="bdabdc64213bb2dbb01c863ab4ef1f268441f2371549453363593e4f4b1cd2c8" exitCode=0 Jan 30 12:18:13 crc kubenswrapper[4869]: I0130 12:18:13.991669 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tqmvq" event={"ID":"ca9bde57-4216-42b4-bac5-8c614648bac0","Type":"ContainerDied","Data":"bdabdc64213bb2dbb01c863ab4ef1f268441f2371549453363593e4f4b1cd2c8"} Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.349126 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.479116 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-config-data\") pod \"ca9bde57-4216-42b4-bac5-8c614648bac0\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.479243 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zg464\" (UniqueName: \"kubernetes.io/projected/ca9bde57-4216-42b4-bac5-8c614648bac0-kube-api-access-zg464\") pod \"ca9bde57-4216-42b4-bac5-8c614648bac0\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.479352 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-combined-ca-bundle\") pod \"ca9bde57-4216-42b4-bac5-8c614648bac0\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.479401 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-fernet-keys\") pod \"ca9bde57-4216-42b4-bac5-8c614648bac0\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.479432 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-scripts\") pod \"ca9bde57-4216-42b4-bac5-8c614648bac0\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.479451 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-credential-keys\") pod \"ca9bde57-4216-42b4-bac5-8c614648bac0\" (UID: \"ca9bde57-4216-42b4-bac5-8c614648bac0\") " Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.485584 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-scripts" (OuterVolumeSpecName: "scripts") pod "ca9bde57-4216-42b4-bac5-8c614648bac0" (UID: "ca9bde57-4216-42b4-bac5-8c614648bac0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.485691 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ca9bde57-4216-42b4-bac5-8c614648bac0" (UID: "ca9bde57-4216-42b4-bac5-8c614648bac0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.486410 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ca9bde57-4216-42b4-bac5-8c614648bac0" (UID: "ca9bde57-4216-42b4-bac5-8c614648bac0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.486826 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca9bde57-4216-42b4-bac5-8c614648bac0-kube-api-access-zg464" (OuterVolumeSpecName: "kube-api-access-zg464") pod "ca9bde57-4216-42b4-bac5-8c614648bac0" (UID: "ca9bde57-4216-42b4-bac5-8c614648bac0"). InnerVolumeSpecName "kube-api-access-zg464". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.505392 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-config-data" (OuterVolumeSpecName: "config-data") pod "ca9bde57-4216-42b4-bac5-8c614648bac0" (UID: "ca9bde57-4216-42b4-bac5-8c614648bac0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.507738 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca9bde57-4216-42b4-bac5-8c614648bac0" (UID: "ca9bde57-4216-42b4-bac5-8c614648bac0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.581203 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zg464\" (UniqueName: \"kubernetes.io/projected/ca9bde57-4216-42b4-bac5-8c614648bac0-kube-api-access-zg464\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.581241 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.581251 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.581259 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.581267 4869 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:15 crc kubenswrapper[4869]: I0130 12:18:15.581276 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca9bde57-4216-42b4-bac5-8c614648bac0-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.008088 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tqmvq" event={"ID":"ca9bde57-4216-42b4-bac5-8c614648bac0","Type":"ContainerDied","Data":"60b18d9673ab38d8ce8dfc9cb78bc80db92e71601e7a4635d3df0ce9345e4e21"} Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.008148 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60b18d9673ab38d8ce8dfc9cb78bc80db92e71601e7a4635d3df0ce9345e4e21" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.008167 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tqmvq" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.086118 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tqmvq"] Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.094411 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tqmvq"] Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.144415 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca9bde57-4216-42b4-bac5-8c614648bac0" path="/var/lib/kubelet/pods/ca9bde57-4216-42b4-bac5-8c614648bac0/volumes" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.179686 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dcp9h"] Jan 30 12:18:16 crc kubenswrapper[4869]: E0130 12:18:16.180100 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca9bde57-4216-42b4-bac5-8c614648bac0" containerName="keystone-bootstrap" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.180126 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca9bde57-4216-42b4-bac5-8c614648bac0" containerName="keystone-bootstrap" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.180334 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca9bde57-4216-42b4-bac5-8c614648bac0" containerName="keystone-bootstrap" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.181081 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.183653 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.184957 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.184980 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qkgf7" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.184970 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.185331 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.190793 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dcp9h"] Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.290131 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-combined-ca-bundle\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.290188 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-fernet-keys\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.290230 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-scripts\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.290326 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-config-data\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.290536 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-credential-keys\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.290735 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pknr\" (UniqueName: \"kubernetes.io/projected/787f8551-8c15-4948-802e-6f768a0eae9f-kube-api-access-8pknr\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.392319 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-config-data\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.392862 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-credential-keys\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.392917 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pknr\" (UniqueName: \"kubernetes.io/projected/787f8551-8c15-4948-802e-6f768a0eae9f-kube-api-access-8pknr\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.392968 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-combined-ca-bundle\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.392990 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-fernet-keys\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.393018 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-scripts\") pod 
\"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.401624 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-credential-keys\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.406190 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-scripts\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.407069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-config-data\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.409329 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-fernet-keys\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.430354 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-combined-ca-bundle\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.459419 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pknr\" (UniqueName: \"kubernetes.io/projected/787f8551-8c15-4948-802e-6f768a0eae9f-kube-api-access-8pknr\") pod \"keystone-bootstrap-dcp9h\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.498126 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:16 crc kubenswrapper[4869]: I0130 12:18:16.956761 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dcp9h"] Jan 30 12:18:17 crc kubenswrapper[4869]: I0130 12:18:17.015978 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dcp9h" event={"ID":"787f8551-8c15-4948-802e-6f768a0eae9f","Type":"ContainerStarted","Data":"84d01b6f6d094d891dda9e4d420c54d671fb0e26938e5a458ff9ac2a628cc4ca"} Jan 30 12:18:18 crc kubenswrapper[4869]: I0130 12:18:18.026634 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dcp9h" event={"ID":"787f8551-8c15-4948-802e-6f768a0eae9f","Type":"ContainerStarted","Data":"197e2519d4bf321643a135476353a63c1791d959740978aa844ab3ffb4f111ad"} Jan 30 12:18:18 crc kubenswrapper[4869]: I0130 12:18:18.049299 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dcp9h" podStartSLOduration=2.049277785 podStartE2EDuration="2.049277785s" podCreationTimestamp="2026-01-30 12:18:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:18:18.042895853 +0000 UTC m=+5048.592771919" watchObservedRunningTime="2026-01-30 12:18:18.049277785 +0000 UTC m=+5048.599153851" Jan 30 12:18:19 crc kubenswrapper[4869]: I0130 12:18:19.502014 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b648cdf7-q9tdl" Jan 30 12:18:19 crc kubenswrapper[4869]: I0130 12:18:19.561461 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644c8fb5bc-fvfcb"] Jan 30 12:18:19 crc kubenswrapper[4869]: I0130 12:18:19.561814 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" podUID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerName="dnsmasq-dns" containerID="cri-o://ed8d9df2e746e12a06a489338c408cd79c4eb81166a0848d0632ebd192c565cf" gracePeriod=10 Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.049984 4869 generic.go:334] "Generic (PLEG): container finished" podID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerID="ed8d9df2e746e12a06a489338c408cd79c4eb81166a0848d0632ebd192c565cf" exitCode=0 Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.050515 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" event={"ID":"661dc461-1fbd-4408-a739-c0b70e0d66bb","Type":"ContainerDied","Data":"ed8d9df2e746e12a06a489338c408cd79c4eb81166a0848d0632ebd192c565cf"} Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.050557 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" event={"ID":"661dc461-1fbd-4408-a739-c0b70e0d66bb","Type":"ContainerDied","Data":"c852f2bd130da24497d7e55d428a7e5759b2db7a57148274011ed5ab968447e0"} Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.050571 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c852f2bd130da24497d7e55d428a7e5759b2db7a57148274011ed5ab968447e0" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.052595 4869 generic.go:334] "Generic (PLEG): container finished" podID="787f8551-8c15-4948-802e-6f768a0eae9f" containerID="197e2519d4bf321643a135476353a63c1791d959740978aa844ab3ffb4f111ad" exitCode=0 Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.052625 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dcp9h" event={"ID":"787f8551-8c15-4948-802e-6f768a0eae9f","Type":"ContainerDied","Data":"197e2519d4bf321643a135476353a63c1791d959740978aa844ab3ffb4f111ad"} Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.094659 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.159615 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-nb\") pod \"661dc461-1fbd-4408-a739-c0b70e0d66bb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.159738 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-sb\") pod \"661dc461-1fbd-4408-a739-c0b70e0d66bb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.159803 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-dns-svc\") pod \"661dc461-1fbd-4408-a739-c0b70e0d66bb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.159841 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66tc5\" (UniqueName: \"kubernetes.io/projected/661dc461-1fbd-4408-a739-c0b70e0d66bb-kube-api-access-66tc5\") pod \"661dc461-1fbd-4408-a739-c0b70e0d66bb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.159899 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-config\") pod \"661dc461-1fbd-4408-a739-c0b70e0d66bb\" (UID: \"661dc461-1fbd-4408-a739-c0b70e0d66bb\") " Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.166483 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/661dc461-1fbd-4408-a739-c0b70e0d66bb-kube-api-access-66tc5" (OuterVolumeSpecName: "kube-api-access-66tc5") pod "661dc461-1fbd-4408-a739-c0b70e0d66bb" (UID: "661dc461-1fbd-4408-a739-c0b70e0d66bb"). InnerVolumeSpecName "kube-api-access-66tc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.203236 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "661dc461-1fbd-4408-a739-c0b70e0d66bb" (UID: "661dc461-1fbd-4408-a739-c0b70e0d66bb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.203326 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "661dc461-1fbd-4408-a739-c0b70e0d66bb" (UID: "661dc461-1fbd-4408-a739-c0b70e0d66bb"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.210207 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-config" (OuterVolumeSpecName: "config") pod "661dc461-1fbd-4408-a739-c0b70e0d66bb" (UID: "661dc461-1fbd-4408-a739-c0b70e0d66bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.225435 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "661dc461-1fbd-4408-a739-c0b70e0d66bb" (UID: "661dc461-1fbd-4408-a739-c0b70e0d66bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.261451 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.261487 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66tc5\" (UniqueName: \"kubernetes.io/projected/661dc461-1fbd-4408-a739-c0b70e0d66bb-kube-api-access-66tc5\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.261500 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-config\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.261511 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:20 crc kubenswrapper[4869]: I0130 12:18:20.261524 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/661dc461-1fbd-4408-a739-c0b70e0d66bb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.058271 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644c8fb5bc-fvfcb" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.102971 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644c8fb5bc-fvfcb"] Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.112633 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-644c8fb5bc-fvfcb"] Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.452469 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.495584 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-scripts\") pod \"787f8551-8c15-4948-802e-6f768a0eae9f\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.495736 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-credential-keys\") pod \"787f8551-8c15-4948-802e-6f768a0eae9f\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.495812 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-config-data\") pod \"787f8551-8c15-4948-802e-6f768a0eae9f\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.495854 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pknr\" (UniqueName: \"kubernetes.io/projected/787f8551-8c15-4948-802e-6f768a0eae9f-kube-api-access-8pknr\") pod \"787f8551-8c15-4948-802e-6f768a0eae9f\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.495922 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-combined-ca-bundle\") pod \"787f8551-8c15-4948-802e-6f768a0eae9f\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.495953 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-fernet-keys\") pod \"787f8551-8c15-4948-802e-6f768a0eae9f\" (UID: \"787f8551-8c15-4948-802e-6f768a0eae9f\") " Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.502460 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "787f8551-8c15-4948-802e-6f768a0eae9f" (UID: "787f8551-8c15-4948-802e-6f768a0eae9f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.503059 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/787f8551-8c15-4948-802e-6f768a0eae9f-kube-api-access-8pknr" (OuterVolumeSpecName: "kube-api-access-8pknr") pod "787f8551-8c15-4948-802e-6f768a0eae9f" (UID: "787f8551-8c15-4948-802e-6f768a0eae9f"). InnerVolumeSpecName "kube-api-access-8pknr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.503174 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-scripts" (OuterVolumeSpecName: "scripts") pod "787f8551-8c15-4948-802e-6f768a0eae9f" (UID: "787f8551-8c15-4948-802e-6f768a0eae9f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.503625 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "787f8551-8c15-4948-802e-6f768a0eae9f" (UID: "787f8551-8c15-4948-802e-6f768a0eae9f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.521429 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-config-data" (OuterVolumeSpecName: "config-data") pod "787f8551-8c15-4948-802e-6f768a0eae9f" (UID: "787f8551-8c15-4948-802e-6f768a0eae9f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.526006 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "787f8551-8c15-4948-802e-6f768a0eae9f" (UID: "787f8551-8c15-4948-802e-6f768a0eae9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.598150 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pknr\" (UniqueName: \"kubernetes.io/projected/787f8551-8c15-4948-802e-6f768a0eae9f-kube-api-access-8pknr\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.598203 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.598217 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.598229 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.598239 4869 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.598267 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/787f8551-8c15-4948-802e-6f768a0eae9f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.769193 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:18:21 crc kubenswrapper[4869]: I0130 12:18:21.769265 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.067861 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dcp9h" event={"ID":"787f8551-8c15-4948-802e-6f768a0eae9f","Type":"ContainerDied","Data":"84d01b6f6d094d891dda9e4d420c54d671fb0e26938e5a458ff9ac2a628cc4ca"} Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.067920 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84d01b6f6d094d891dda9e4d420c54d671fb0e26938e5a458ff9ac2a628cc4ca" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.067893 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dcp9h" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.142907 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="661dc461-1fbd-4408-a739-c0b70e0d66bb" path="/var/lib/kubelet/pods/661dc461-1fbd-4408-a739-c0b70e0d66bb/volumes" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.186960 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bb57f64fd-4jsjt"] Jan 30 12:18:22 crc kubenswrapper[4869]: E0130 12:18:22.187284 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerName="init" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.187300 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerName="init" Jan 30 12:18:22 crc kubenswrapper[4869]: E0130 12:18:22.187314 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerName="dnsmasq-dns" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.187322 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerName="dnsmasq-dns" Jan 30 12:18:22 crc kubenswrapper[4869]: E0130 12:18:22.187338 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="787f8551-8c15-4948-802e-6f768a0eae9f" containerName="keystone-bootstrap" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.187344 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="787f8551-8c15-4948-802e-6f768a0eae9f" containerName="keystone-bootstrap" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.187501 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="661dc461-1fbd-4408-a739-c0b70e0d66bb" containerName="dnsmasq-dns" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.187512 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="787f8551-8c15-4948-802e-6f768a0eae9f" containerName="keystone-bootstrap" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.188160 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.191631 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.191949 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qkgf7" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.191975 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.192197 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.221742 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bb57f64fd-4jsjt"] Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.313906 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-fernet-keys\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.314013 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-scripts\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.314057 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmjzb\" (UniqueName: \"kubernetes.io/projected/3e14fe48-4006-43c6-ad0c-18261c5cc38e-kube-api-access-kmjzb\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.314097 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-config-data\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.314145 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-credential-keys\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.314184 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-combined-ca-bundle\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.416169 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-fernet-keys\") pod \"keystone-bb57f64fd-4jsjt\" (UID: 
\"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.416269 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-scripts\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.416304 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmjzb\" (UniqueName: \"kubernetes.io/projected/3e14fe48-4006-43c6-ad0c-18261c5cc38e-kube-api-access-kmjzb\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.416332 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-config-data\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.416363 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-credential-keys\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.416391 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-combined-ca-bundle\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.421676 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-credential-keys\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.422154 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-scripts\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.422322 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-config-data\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.424133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-combined-ca-bundle\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.441095 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e14fe48-4006-43c6-ad0c-18261c5cc38e-fernet-keys\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.443648 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmjzb\" (UniqueName: \"kubernetes.io/projected/3e14fe48-4006-43c6-ad0c-18261c5cc38e-kube-api-access-kmjzb\") pod \"keystone-bb57f64fd-4jsjt\" (UID: \"3e14fe48-4006-43c6-ad0c-18261c5cc38e\") " pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.533871 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:22 crc kubenswrapper[4869]: I0130 12:18:22.971307 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bb57f64fd-4jsjt"] Jan 30 12:18:23 crc kubenswrapper[4869]: I0130 12:18:23.077912 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bb57f64fd-4jsjt" event={"ID":"3e14fe48-4006-43c6-ad0c-18261c5cc38e","Type":"ContainerStarted","Data":"7964c9ec6f2693f70fb11e4af3c2e5f503e508e0c484d874e92f0d2b6572fe50"} Jan 30 12:18:24 crc kubenswrapper[4869]: I0130 12:18:24.086487 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bb57f64fd-4jsjt" event={"ID":"3e14fe48-4006-43c6-ad0c-18261c5cc38e","Type":"ContainerStarted","Data":"9645206d849da5036c28f846ff9e7f3dd1331f92707f5bcded6e095fb9daf616"} Jan 30 12:18:24 crc kubenswrapper[4869]: I0130 12:18:24.087426 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:24 crc kubenswrapper[4869]: I0130 12:18:24.110627 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bb57f64fd-4jsjt" podStartSLOduration=2.110600285 podStartE2EDuration="2.110600285s" podCreationTimestamp="2026-01-30 12:18:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:18:24.102639899 +0000 UTC m=+5054.652515965" watchObservedRunningTime="2026-01-30 12:18:24.110600285 +0000 UTC m=+5054.660476351" Jan 30 12:18:51 crc kubenswrapper[4869]: I0130 12:18:51.769397 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:18:51 crc kubenswrapper[4869]: I0130 12:18:51.770085 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:18:51 crc kubenswrapper[4869]: I0130 12:18:51.770146 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 12:18:51 crc kubenswrapper[4869]: I0130 12:18:51.770948 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"2753fcbf560309b5b7e37904d0d2cf7f1caef840cef8861e52eecbabf9d52a12"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 12:18:51 crc kubenswrapper[4869]: I0130 12:18:51.771005 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://2753fcbf560309b5b7e37904d0d2cf7f1caef840cef8861e52eecbabf9d52a12" gracePeriod=600 Jan 30 12:18:52 crc kubenswrapper[4869]: I0130 12:18:52.324326 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="2753fcbf560309b5b7e37904d0d2cf7f1caef840cef8861e52eecbabf9d52a12" exitCode=0 Jan 30 12:18:52 crc kubenswrapper[4869]: I0130 12:18:52.324398 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"2753fcbf560309b5b7e37904d0d2cf7f1caef840cef8861e52eecbabf9d52a12"} Jan 30 12:18:52 crc kubenswrapper[4869]: I0130 12:18:52.324744 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6"} Jan 30 12:18:52 crc kubenswrapper[4869]: I0130 12:18:52.324783 4869 scope.go:117] "RemoveContainer" containerID="99eeb9bf57f71af225cc2749d0f9a445ec42d4ae09d8c7cd60aabae1df6536e4" Jan 30 12:18:54 crc kubenswrapper[4869]: I0130 12:18:54.157854 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-bb57f64fd-4jsjt" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.375397 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.377332 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.379537 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.379579 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-fc74w" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.379581 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.387933 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.540333 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/50c7288f-ea48-4c27-b910-19c2a8dec150-openstack-config\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.540423 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/50c7288f-ea48-4c27-b910-19c2a8dec150-openstack-config-secret\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.540483 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpd4j\" (UniqueName: \"kubernetes.io/projected/50c7288f-ea48-4c27-b910-19c2a8dec150-kube-api-access-bpd4j\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.642368 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpd4j\" (UniqueName: \"kubernetes.io/projected/50c7288f-ea48-4c27-b910-19c2a8dec150-kube-api-access-bpd4j\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.642492 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/50c7288f-ea48-4c27-b910-19c2a8dec150-openstack-config\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.642525 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/50c7288f-ea48-4c27-b910-19c2a8dec150-openstack-config-secret\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.644285 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/50c7288f-ea48-4c27-b910-19c2a8dec150-openstack-config\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.653269 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/50c7288f-ea48-4c27-b910-19c2a8dec150-openstack-config-secret\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.662348 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpd4j\" (UniqueName: \"kubernetes.io/projected/50c7288f-ea48-4c27-b910-19c2a8dec150-kube-api-access-bpd4j\") pod \"openstackclient\" (UID: \"50c7288f-ea48-4c27-b910-19c2a8dec150\") " pod="openstack/openstackclient" Jan 30 12:18:58 crc kubenswrapper[4869]: I0130 12:18:58.697433 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 30 12:18:59 crc kubenswrapper[4869]: I0130 12:18:59.128535 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 30 12:18:59 crc kubenswrapper[4869]: I0130 12:18:59.381294 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"50c7288f-ea48-4c27-b910-19c2a8dec150","Type":"ContainerStarted","Data":"b76c35e3069868375c1f2eab38d32ffa00c6aeb544327c00505e62bae8e89d28"} Jan 30 12:18:59 crc kubenswrapper[4869]: I0130 12:18:59.381653 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"50c7288f-ea48-4c27-b910-19c2a8dec150","Type":"ContainerStarted","Data":"bedd5cecbc7dadc9394677071b585caf1683cd4b8f776d526910bbb9d9a70eda"} Jan 30 12:18:59 crc kubenswrapper[4869]: I0130 12:18:59.404021 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.4039926 podStartE2EDuration="1.4039926s" podCreationTimestamp="2026-01-30 12:18:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 12:18:59.397637869 +0000 UTC m=+5089.947513965" watchObservedRunningTime="2026-01-30 12:18:59.4039926 +0000 UTC m=+5089.953868696" Jan 30 12:20:56 crc kubenswrapper[4869]: I0130 12:20:56.052198 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-md2pp"] Jan 30 12:20:56 crc kubenswrapper[4869]: I0130 12:20:56.061986 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-md2pp"] Jan 30 12:20:56 crc kubenswrapper[4869]: I0130 12:20:56.142316 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a7b3b5c-5d53-4333-83c6-5dceec6a4eae" path="/var/lib/kubelet/pods/0a7b3b5c-5d53-4333-83c6-5dceec6a4eae/volumes" Jan 30 12:21:16 crc kubenswrapper[4869]: I0130 12:21:16.076274 4869 scope.go:117] "RemoveContainer" containerID="835745b4c080ac9d80bcd47a88e6b218d6a4b3abf85884a69d72fe0aefdd01be" Jan 30 12:21:21 crc kubenswrapper[4869]: I0130 12:21:21.769576 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:21:21 crc kubenswrapper[4869]: I0130 12:21:21.770406 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Jan 30 12:21:23 crc kubenswrapper[4869]: I0130 12:21:23.838521 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s4krq"] Jan 30 12:21:23 crc kubenswrapper[4869]: I0130 12:21:23.841132 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:23 crc kubenswrapper[4869]: I0130 12:21:23.859052 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4krq"] Jan 30 12:21:23 crc kubenswrapper[4869]: I0130 12:21:23.929549 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-catalog-content\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:23 crc kubenswrapper[4869]: I0130 12:21:23.929786 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-utilities\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:23 crc kubenswrapper[4869]: I0130 12:21:23.929969 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jprt7\" (UniqueName: \"kubernetes.io/projected/5c4e979c-a064-42b3-bdae-5b01c7c40529-kube-api-access-jprt7\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.031561 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jprt7\" (UniqueName: \"kubernetes.io/projected/5c4e979c-a064-42b3-bdae-5b01c7c40529-kube-api-access-jprt7\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.031653 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-catalog-content\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.031746 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-utilities\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.032215 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-catalog-content\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.032235 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-utilities\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.055705 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jprt7\" (UniqueName: \"kubernetes.io/projected/5c4e979c-a064-42b3-bdae-5b01c7c40529-kube-api-access-jprt7\") pod \"redhat-marketplace-s4krq\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.172753 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:24 crc kubenswrapper[4869]: I0130 12:21:24.640865 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4krq"] Jan 30 12:21:25 crc kubenswrapper[4869]: I0130 12:21:25.517971 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerID="5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e" exitCode=0 Jan 30 12:21:25 crc kubenswrapper[4869]: I0130 12:21:25.518088 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4krq" event={"ID":"5c4e979c-a064-42b3-bdae-5b01c7c40529","Type":"ContainerDied","Data":"5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e"} Jan 30 12:21:25 crc kubenswrapper[4869]: I0130 12:21:25.518381 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4krq" event={"ID":"5c4e979c-a064-42b3-bdae-5b01c7c40529","Type":"ContainerStarted","Data":"bc0442fb1dbdc220e3b281b07229a7f38c109d9bbf2640b617708d231dfd1715"} Jan 30 12:21:25 crc kubenswrapper[4869]: I0130 12:21:25.520510 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.019387 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z259g"] Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.022129 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.036537 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z259g"] Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.087656 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-catalog-content\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.087769 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l4bk\" (UniqueName: \"kubernetes.io/projected/07826881-572d-4562-9056-8dd3a1d87208-kube-api-access-7l4bk\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.087817 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-utilities\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.189796 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-catalog-content\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.189909 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l4bk\" (UniqueName: \"kubernetes.io/projected/07826881-572d-4562-9056-8dd3a1d87208-kube-api-access-7l4bk\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.189946 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-utilities\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.190642 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-catalog-content\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.190651 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-utilities\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.212311 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7l4bk\" (UniqueName: \"kubernetes.io/projected/07826881-572d-4562-9056-8dd3a1d87208-kube-api-access-7l4bk\") pod \"community-operators-z259g\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.348481 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.556681 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerID="76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88" exitCode=0 Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.557102 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4krq" event={"ID":"5c4e979c-a064-42b3-bdae-5b01c7c40529","Type":"ContainerDied","Data":"76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88"} Jan 30 12:21:27 crc kubenswrapper[4869]: I0130 12:21:27.861299 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z259g"] Jan 30 12:21:27 crc kubenswrapper[4869]: W0130 12:21:27.864285 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07826881_572d_4562_9056_8dd3a1d87208.slice/crio-2941783fe82cec6d0d5936164b8054578409e8f621ac59af9880e61ea8c33649 WatchSource:0}: Error finding container 2941783fe82cec6d0d5936164b8054578409e8f621ac59af9880e61ea8c33649: Status 404 returned error can't find the container with id 2941783fe82cec6d0d5936164b8054578409e8f621ac59af9880e61ea8c33649 Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.568457 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4krq" event={"ID":"5c4e979c-a064-42b3-bdae-5b01c7c40529","Type":"ContainerStarted","Data":"699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab"} Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.572281 4869 generic.go:334] "Generic (PLEG): container finished" podID="07826881-572d-4562-9056-8dd3a1d87208" containerID="d924f3d7e12b85143a9c6822409ada6e2d60ab098b51918a146d9935bda4d89c" exitCode=0 Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.572319 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z259g" event={"ID":"07826881-572d-4562-9056-8dd3a1d87208","Type":"ContainerDied","Data":"d924f3d7e12b85143a9c6822409ada6e2d60ab098b51918a146d9935bda4d89c"} Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.572340 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z259g" event={"ID":"07826881-572d-4562-9056-8dd3a1d87208","Type":"ContainerStarted","Data":"2941783fe82cec6d0d5936164b8054578409e8f621ac59af9880e61ea8c33649"} Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.591177 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s4krq" podStartSLOduration=2.992914639 podStartE2EDuration="5.591148463s" podCreationTimestamp="2026-01-30 12:21:23 +0000 UTC" firstStartedPulling="2026-01-30 12:21:25.520250142 +0000 UTC m=+5236.070126208" lastFinishedPulling="2026-01-30 12:21:28.118483976 +0000 UTC m=+5238.668360032" observedRunningTime="2026-01-30 12:21:28.585266007 +0000 UTC 
m=+5239.135142073" watchObservedRunningTime="2026-01-30 12:21:28.591148463 +0000 UTC m=+5239.141024529" Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.890196 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kzp9s/must-gather-cjntw"] Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.892149 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.895241 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-kzp9s"/"openshift-service-ca.crt" Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.895584 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-kzp9s"/"kube-root-ca.crt" Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.896024 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-kzp9s"/"default-dockercfg-42dsv" Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.913668 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-kzp9s/must-gather-cjntw"] Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.921071 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmd9n\" (UniqueName: \"kubernetes.io/projected/0654b0c4-7447-4b20-abf9-c7978f7481da-kube-api-access-dmd9n\") pod \"must-gather-cjntw\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:28 crc kubenswrapper[4869]: I0130 12:21:28.921258 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0654b0c4-7447-4b20-abf9-c7978f7481da-must-gather-output\") pod \"must-gather-cjntw\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:29 crc kubenswrapper[4869]: I0130 12:21:29.023488 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmd9n\" (UniqueName: \"kubernetes.io/projected/0654b0c4-7447-4b20-abf9-c7978f7481da-kube-api-access-dmd9n\") pod \"must-gather-cjntw\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:29 crc kubenswrapper[4869]: I0130 12:21:29.023610 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0654b0c4-7447-4b20-abf9-c7978f7481da-must-gather-output\") pod \"must-gather-cjntw\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:29 crc kubenswrapper[4869]: I0130 12:21:29.024178 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0654b0c4-7447-4b20-abf9-c7978f7481da-must-gather-output\") pod \"must-gather-cjntw\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:29 crc kubenswrapper[4869]: I0130 12:21:29.047465 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmd9n\" (UniqueName: \"kubernetes.io/projected/0654b0c4-7447-4b20-abf9-c7978f7481da-kube-api-access-dmd9n\") pod \"must-gather-cjntw\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " 
pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:29 crc kubenswrapper[4869]: I0130 12:21:29.215550 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:21:29 crc kubenswrapper[4869]: I0130 12:21:29.473525 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-kzp9s/must-gather-cjntw"] Jan 30 12:21:29 crc kubenswrapper[4869]: I0130 12:21:29.581096 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/must-gather-cjntw" event={"ID":"0654b0c4-7447-4b20-abf9-c7978f7481da","Type":"ContainerStarted","Data":"6aba0a7203dd88ba9ac88cd96959574813932355b09363f270f39a72aa7f1e65"} Jan 30 12:21:30 crc kubenswrapper[4869]: I0130 12:21:30.597949 4869 generic.go:334] "Generic (PLEG): container finished" podID="07826881-572d-4562-9056-8dd3a1d87208" containerID="2a36eb6a6c3483d03af97b06576afafde2abd839fb9ae75f5b2dd2b2084fa0d2" exitCode=0 Jan 30 12:21:30 crc kubenswrapper[4869]: I0130 12:21:30.598012 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z259g" event={"ID":"07826881-572d-4562-9056-8dd3a1d87208","Type":"ContainerDied","Data":"2a36eb6a6c3483d03af97b06576afafde2abd839fb9ae75f5b2dd2b2084fa0d2"} Jan 30 12:21:31 crc kubenswrapper[4869]: I0130 12:21:31.613120 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z259g" event={"ID":"07826881-572d-4562-9056-8dd3a1d87208","Type":"ContainerStarted","Data":"b994c44ecc7dd31eae6e29ab88685f15f40cb4d230524c04fa5d72d11b663778"} Jan 30 12:21:31 crc kubenswrapper[4869]: I0130 12:21:31.638218 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z259g" podStartSLOduration=3.207631107 podStartE2EDuration="5.638193129s" podCreationTimestamp="2026-01-30 12:21:26 +0000 UTC" firstStartedPulling="2026-01-30 12:21:28.574239965 +0000 UTC m=+5239.124116031" lastFinishedPulling="2026-01-30 12:21:31.004801977 +0000 UTC m=+5241.554678053" observedRunningTime="2026-01-30 12:21:31.63541191 +0000 UTC m=+5242.185287986" watchObservedRunningTime="2026-01-30 12:21:31.638193129 +0000 UTC m=+5242.188069195" Jan 30 12:21:34 crc kubenswrapper[4869]: I0130 12:21:34.173043 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:34 crc kubenswrapper[4869]: I0130 12:21:34.173509 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:34 crc kubenswrapper[4869]: I0130 12:21:34.222626 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:34 crc kubenswrapper[4869]: I0130 12:21:34.675769 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:35 crc kubenswrapper[4869]: I0130 12:21:35.409341 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4krq"] Jan 30 12:21:36 crc kubenswrapper[4869]: I0130 12:21:36.651994 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s4krq" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="registry-server" 
containerID="cri-o://699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab" gracePeriod=2 Jan 30 12:21:36 crc kubenswrapper[4869]: I0130 12:21:36.652764 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/must-gather-cjntw" event={"ID":"0654b0c4-7447-4b20-abf9-c7978f7481da","Type":"ContainerStarted","Data":"488ac7a5500e6c34e1ac9ef9d6a5cca571580240de03c96ca59c8a2315d95d0f"} Jan 30 12:21:36 crc kubenswrapper[4869]: I0130 12:21:36.652790 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/must-gather-cjntw" event={"ID":"0654b0c4-7447-4b20-abf9-c7978f7481da","Type":"ContainerStarted","Data":"778717c4c6abd6ac4a11a2a39620e5b2925257a2c286f46727a1afef91a80f95"} Jan 30 12:21:36 crc kubenswrapper[4869]: I0130 12:21:36.676380 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kzp9s/must-gather-cjntw" podStartSLOduration=2.6470590400000003 podStartE2EDuration="8.676363061s" podCreationTimestamp="2026-01-30 12:21:28 +0000 UTC" firstStartedPulling="2026-01-30 12:21:29.477568549 +0000 UTC m=+5240.027444615" lastFinishedPulling="2026-01-30 12:21:35.50687257 +0000 UTC m=+5246.056748636" observedRunningTime="2026-01-30 12:21:36.670944998 +0000 UTC m=+5247.220821074" watchObservedRunningTime="2026-01-30 12:21:36.676363061 +0000 UTC m=+5247.226239127" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.053409 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.209435 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-catalog-content\") pod \"5c4e979c-a064-42b3-bdae-5b01c7c40529\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.209498 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jprt7\" (UniqueName: \"kubernetes.io/projected/5c4e979c-a064-42b3-bdae-5b01c7c40529-kube-api-access-jprt7\") pod \"5c4e979c-a064-42b3-bdae-5b01c7c40529\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.209614 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-utilities\") pod \"5c4e979c-a064-42b3-bdae-5b01c7c40529\" (UID: \"5c4e979c-a064-42b3-bdae-5b01c7c40529\") " Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.210769 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-utilities" (OuterVolumeSpecName: "utilities") pod "5c4e979c-a064-42b3-bdae-5b01c7c40529" (UID: "5c4e979c-a064-42b3-bdae-5b01c7c40529"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.219320 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c4e979c-a064-42b3-bdae-5b01c7c40529-kube-api-access-jprt7" (OuterVolumeSpecName: "kube-api-access-jprt7") pod "5c4e979c-a064-42b3-bdae-5b01c7c40529" (UID: "5c4e979c-a064-42b3-bdae-5b01c7c40529"). InnerVolumeSpecName "kube-api-access-jprt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.236365 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c4e979c-a064-42b3-bdae-5b01c7c40529" (UID: "5c4e979c-a064-42b3-bdae-5b01c7c40529"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.311634 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.311683 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4e979c-a064-42b3-bdae-5b01c7c40529-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.311792 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jprt7\" (UniqueName: \"kubernetes.io/projected/5c4e979c-a064-42b3-bdae-5b01c7c40529-kube-api-access-jprt7\") on node \"crc\" DevicePath \"\"" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.349092 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.349385 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.396604 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.662464 4869 generic.go:334] "Generic (PLEG): container finished" podID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerID="699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab" exitCode=0 Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.662556 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4krq" event={"ID":"5c4e979c-a064-42b3-bdae-5b01c7c40529","Type":"ContainerDied","Data":"699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab"} Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.662576 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4krq" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.662649 4869 scope.go:117] "RemoveContainer" containerID="699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.662633 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4krq" event={"ID":"5c4e979c-a064-42b3-bdae-5b01c7c40529","Type":"ContainerDied","Data":"bc0442fb1dbdc220e3b281b07229a7f38c109d9bbf2640b617708d231dfd1715"} Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.705294 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4krq"] Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.717143 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4krq"] Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.719952 4869 scope.go:117] "RemoveContainer" containerID="76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.720007 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.742689 4869 scope.go:117] "RemoveContainer" containerID="5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.776941 4869 scope.go:117] "RemoveContainer" containerID="699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab" Jan 30 12:21:37 crc kubenswrapper[4869]: E0130 12:21:37.777625 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab\": container with ID starting with 699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab not found: ID does not exist" containerID="699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.777659 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab"} err="failed to get container status \"699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab\": rpc error: code = NotFound desc = could not find container \"699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab\": container with ID starting with 699711b5613fac21492c7b4c88caf7d5ac03931ba09915eae0f94dcbd3e14aab not found: ID does not exist" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.777685 4869 scope.go:117] "RemoveContainer" containerID="76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88" Jan 30 12:21:37 crc kubenswrapper[4869]: E0130 12:21:37.777961 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88\": container with ID starting with 76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88 not found: ID does not exist" containerID="76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.777987 4869 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88"} err="failed to get container status \"76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88\": rpc error: code = NotFound desc = could not find container \"76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88\": container with ID starting with 76685bc7dfe426b9e24c3ad327ad35595e20c331fa43d2561d3f1001951dcd88 not found: ID does not exist" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.778001 4869 scope.go:117] "RemoveContainer" containerID="5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e" Jan 30 12:21:37 crc kubenswrapper[4869]: E0130 12:21:37.778257 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e\": container with ID starting with 5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e not found: ID does not exist" containerID="5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e" Jan 30 12:21:37 crc kubenswrapper[4869]: I0130 12:21:37.778287 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e"} err="failed to get container status \"5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e\": rpc error: code = NotFound desc = could not find container \"5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e\": container with ID starting with 5cf13e31064f5bc4d23d67a7d980d3808b5c529b6d7ba3d570847d4f251efd0e not found: ID does not exist" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.143919 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" path="/var/lib/kubelet/pods/5c4e979c-a064-42b3-bdae-5b01c7c40529/volumes" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.370372 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kzp9s/crc-debug-g776x"] Jan 30 12:21:38 crc kubenswrapper[4869]: E0130 12:21:38.370957 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="extract-utilities" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.370976 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="extract-utilities" Jan 30 12:21:38 crc kubenswrapper[4869]: E0130 12:21:38.371005 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="extract-content" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.371014 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="extract-content" Jan 30 12:21:38 crc kubenswrapper[4869]: E0130 12:21:38.371037 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="registry-server" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.371048 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="registry-server" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.371229 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c4e979c-a064-42b3-bdae-5b01c7c40529" containerName="registry-server" Jan 30 12:21:38 crc kubenswrapper[4869]: 
I0130 12:21:38.371877 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.531452 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ec127d9-c4be-40bb-bc9a-da99b5311308-host\") pod \"crc-debug-g776x\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.531512 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc9hk\" (UniqueName: \"kubernetes.io/projected/0ec127d9-c4be-40bb-bc9a-da99b5311308-kube-api-access-rc9hk\") pod \"crc-debug-g776x\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.633042 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ec127d9-c4be-40bb-bc9a-da99b5311308-host\") pod \"crc-debug-g776x\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.633092 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc9hk\" (UniqueName: \"kubernetes.io/projected/0ec127d9-c4be-40bb-bc9a-da99b5311308-kube-api-access-rc9hk\") pod \"crc-debug-g776x\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.633234 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ec127d9-c4be-40bb-bc9a-da99b5311308-host\") pod \"crc-debug-g776x\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.651906 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc9hk\" (UniqueName: \"kubernetes.io/projected/0ec127d9-c4be-40bb-bc9a-da99b5311308-kube-api-access-rc9hk\") pod \"crc-debug-g776x\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: I0130 12:21:38.689943 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:21:38 crc kubenswrapper[4869]: W0130 12:21:38.718093 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ec127d9_c4be_40bb_bc9a_da99b5311308.slice/crio-692b40f450811580f48da9d26fec0dc99aafc7c88aa031dcc46af9ae2961f32b WatchSource:0}: Error finding container 692b40f450811580f48da9d26fec0dc99aafc7c88aa031dcc46af9ae2961f32b: Status 404 returned error can't find the container with id 692b40f450811580f48da9d26fec0dc99aafc7c88aa031dcc46af9ae2961f32b Jan 30 12:21:39 crc kubenswrapper[4869]: I0130 12:21:39.700107 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/crc-debug-g776x" event={"ID":"0ec127d9-c4be-40bb-bc9a-da99b5311308","Type":"ContainerStarted","Data":"692b40f450811580f48da9d26fec0dc99aafc7c88aa031dcc46af9ae2961f32b"} Jan 30 12:21:39 crc kubenswrapper[4869]: I0130 12:21:39.807080 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z259g"] Jan 30 12:21:40 crc kubenswrapper[4869]: I0130 12:21:40.708043 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z259g" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="registry-server" containerID="cri-o://b994c44ecc7dd31eae6e29ab88685f15f40cb4d230524c04fa5d72d11b663778" gracePeriod=2 Jan 30 12:21:41 crc kubenswrapper[4869]: I0130 12:21:41.719614 4869 generic.go:334] "Generic (PLEG): container finished" podID="07826881-572d-4562-9056-8dd3a1d87208" containerID="b994c44ecc7dd31eae6e29ab88685f15f40cb4d230524c04fa5d72d11b663778" exitCode=0 Jan 30 12:21:41 crc kubenswrapper[4869]: I0130 12:21:41.720031 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z259g" event={"ID":"07826881-572d-4562-9056-8dd3a1d87208","Type":"ContainerDied","Data":"b994c44ecc7dd31eae6e29ab88685f15f40cb4d230524c04fa5d72d11b663778"} Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.452104 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.616070 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-utilities\") pod \"07826881-572d-4562-9056-8dd3a1d87208\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.616324 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l4bk\" (UniqueName: \"kubernetes.io/projected/07826881-572d-4562-9056-8dd3a1d87208-kube-api-access-7l4bk\") pod \"07826881-572d-4562-9056-8dd3a1d87208\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.616416 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-catalog-content\") pod \"07826881-572d-4562-9056-8dd3a1d87208\" (UID: \"07826881-572d-4562-9056-8dd3a1d87208\") " Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.617125 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-utilities" (OuterVolumeSpecName: "utilities") pod "07826881-572d-4562-9056-8dd3a1d87208" (UID: "07826881-572d-4562-9056-8dd3a1d87208"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.644110 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07826881-572d-4562-9056-8dd3a1d87208-kube-api-access-7l4bk" (OuterVolumeSpecName: "kube-api-access-7l4bk") pod "07826881-572d-4562-9056-8dd3a1d87208" (UID: "07826881-572d-4562-9056-8dd3a1d87208"). InnerVolumeSpecName "kube-api-access-7l4bk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.681704 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07826881-572d-4562-9056-8dd3a1d87208" (UID: "07826881-572d-4562-9056-8dd3a1d87208"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.718150 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.718195 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07826881-572d-4562-9056-8dd3a1d87208-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.718206 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l4bk\" (UniqueName: \"kubernetes.io/projected/07826881-572d-4562-9056-8dd3a1d87208-kube-api-access-7l4bk\") on node \"crc\" DevicePath \"\"" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.733277 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z259g" event={"ID":"07826881-572d-4562-9056-8dd3a1d87208","Type":"ContainerDied","Data":"2941783fe82cec6d0d5936164b8054578409e8f621ac59af9880e61ea8c33649"} Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.733352 4869 scope.go:117] "RemoveContainer" containerID="b994c44ecc7dd31eae6e29ab88685f15f40cb4d230524c04fa5d72d11b663778" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.733351 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z259g" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.771942 4869 scope.go:117] "RemoveContainer" containerID="2a36eb6a6c3483d03af97b06576afafde2abd839fb9ae75f5b2dd2b2084fa0d2" Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.773349 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z259g"] Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.779935 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z259g"] Jan 30 12:21:42 crc kubenswrapper[4869]: I0130 12:21:42.801852 4869 scope.go:117] "RemoveContainer" containerID="d924f3d7e12b85143a9c6822409ada6e2d60ab098b51918a146d9935bda4d89c" Jan 30 12:21:44 crc kubenswrapper[4869]: I0130 12:21:44.148084 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07826881-572d-4562-9056-8dd3a1d87208" path="/var/lib/kubelet/pods/07826881-572d-4562-9056-8dd3a1d87208/volumes" Jan 30 12:21:51 crc kubenswrapper[4869]: I0130 12:21:51.117100 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/crc-debug-g776x" event={"ID":"0ec127d9-c4be-40bb-bc9a-da99b5311308","Type":"ContainerStarted","Data":"f0cb776ab6ab9843d0b1aa6741c69afd4f51588ef6d602c7da7c3ae8d4602254"} Jan 30 12:21:51 crc kubenswrapper[4869]: I0130 12:21:51.134640 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kzp9s/crc-debug-g776x" podStartSLOduration=0.998824568 podStartE2EDuration="13.134617921s" podCreationTimestamp="2026-01-30 12:21:38 +0000 UTC" firstStartedPulling="2026-01-30 12:21:38.721838745 +0000 UTC m=+5249.271714811" lastFinishedPulling="2026-01-30 12:21:50.857632098 +0000 UTC m=+5261.407508164" observedRunningTime="2026-01-30 12:21:51.128108407 +0000 UTC m=+5261.677984473" watchObservedRunningTime="2026-01-30 12:21:51.134617921 +0000 UTC m=+5261.684493987" Jan 30 12:21:51 crc kubenswrapper[4869]: I0130 12:21:51.769984 4869 
patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:21:51 crc kubenswrapper[4869]: I0130 12:21:51.770654 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:22:10 crc kubenswrapper[4869]: I0130 12:22:10.285580 4869 generic.go:334] "Generic (PLEG): container finished" podID="0ec127d9-c4be-40bb-bc9a-da99b5311308" containerID="f0cb776ab6ab9843d0b1aa6741c69afd4f51588ef6d602c7da7c3ae8d4602254" exitCode=0 Jan 30 12:22:10 crc kubenswrapper[4869]: I0130 12:22:10.285663 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/crc-debug-g776x" event={"ID":"0ec127d9-c4be-40bb-bc9a-da99b5311308","Type":"ContainerDied","Data":"f0cb776ab6ab9843d0b1aa6741c69afd4f51588ef6d602c7da7c3ae8d4602254"} Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.397766 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.435596 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kzp9s/crc-debug-g776x"] Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.445117 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kzp9s/crc-debug-g776x"] Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.573028 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ec127d9-c4be-40bb-bc9a-da99b5311308-host\") pod \"0ec127d9-c4be-40bb-bc9a-da99b5311308\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.573159 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc9hk\" (UniqueName: \"kubernetes.io/projected/0ec127d9-c4be-40bb-bc9a-da99b5311308-kube-api-access-rc9hk\") pod \"0ec127d9-c4be-40bb-bc9a-da99b5311308\" (UID: \"0ec127d9-c4be-40bb-bc9a-da99b5311308\") " Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.573202 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0ec127d9-c4be-40bb-bc9a-da99b5311308-host" (OuterVolumeSpecName: "host") pod "0ec127d9-c4be-40bb-bc9a-da99b5311308" (UID: "0ec127d9-c4be-40bb-bc9a-da99b5311308"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.573755 4869 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ec127d9-c4be-40bb-bc9a-da99b5311308-host\") on node \"crc\" DevicePath \"\"" Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.585997 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ec127d9-c4be-40bb-bc9a-da99b5311308-kube-api-access-rc9hk" (OuterVolumeSpecName: "kube-api-access-rc9hk") pod "0ec127d9-c4be-40bb-bc9a-da99b5311308" (UID: "0ec127d9-c4be-40bb-bc9a-da99b5311308"). InnerVolumeSpecName "kube-api-access-rc9hk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:22:11 crc kubenswrapper[4869]: I0130 12:22:11.675549 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc9hk\" (UniqueName: \"kubernetes.io/projected/0ec127d9-c4be-40bb-bc9a-da99b5311308-kube-api-access-rc9hk\") on node \"crc\" DevicePath \"\"" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.145636 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ec127d9-c4be-40bb-bc9a-da99b5311308" path="/var/lib/kubelet/pods/0ec127d9-c4be-40bb-bc9a-da99b5311308/volumes" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.302911 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-g776x" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.302909 4869 scope.go:117] "RemoveContainer" containerID="f0cb776ab6ab9843d0b1aa6741c69afd4f51588ef6d602c7da7c3ae8d4602254" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.624334 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kzp9s/crc-debug-8s9lk"] Jan 30 12:22:12 crc kubenswrapper[4869]: E0130 12:22:12.624814 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="extract-utilities" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.624831 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="extract-utilities" Jan 30 12:22:12 crc kubenswrapper[4869]: E0130 12:22:12.624852 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="registry-server" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.624860 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="registry-server" Jan 30 12:22:12 crc kubenswrapper[4869]: E0130 12:22:12.624876 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="extract-content" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.624883 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="extract-content" Jan 30 12:22:12 crc kubenswrapper[4869]: E0130 12:22:12.624920 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec127d9-c4be-40bb-bc9a-da99b5311308" containerName="container-00" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.624927 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec127d9-c4be-40bb-bc9a-da99b5311308" containerName="container-00" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.625121 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="07826881-572d-4562-9056-8dd3a1d87208" containerName="registry-server" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.625149 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec127d9-c4be-40bb-bc9a-da99b5311308" containerName="container-00" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.625871 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.791899 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md7cn\" (UniqueName: \"kubernetes.io/projected/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-kube-api-access-md7cn\") pod \"crc-debug-8s9lk\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.792348 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-host\") pod \"crc-debug-8s9lk\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.894622 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md7cn\" (UniqueName: \"kubernetes.io/projected/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-kube-api-access-md7cn\") pod \"crc-debug-8s9lk\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.894778 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-host\") pod \"crc-debug-8s9lk\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.894921 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-host\") pod \"crc-debug-8s9lk\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.930637 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md7cn\" (UniqueName: \"kubernetes.io/projected/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-kube-api-access-md7cn\") pod \"crc-debug-8s9lk\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: I0130 12:22:12.942194 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:12 crc kubenswrapper[4869]: W0130 12:22:12.988200 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d033dfd_b6e4_48bd_8142_9b16cf4c9b88.slice/crio-9a30f2ec80674cec8eb67cdf16e9dfef0086198eebf63ded48e131840d518ac7 WatchSource:0}: Error finding container 9a30f2ec80674cec8eb67cdf16e9dfef0086198eebf63ded48e131840d518ac7: Status 404 returned error can't find the container with id 9a30f2ec80674cec8eb67cdf16e9dfef0086198eebf63ded48e131840d518ac7 Jan 30 12:22:13 crc kubenswrapper[4869]: I0130 12:22:13.311721 4869 generic.go:334] "Generic (PLEG): container finished" podID="3d033dfd-b6e4-48bd-8142-9b16cf4c9b88" containerID="69bb080d877f50067da8fc1d036c45afdd8f0d090c802b3198e42467421baa9f" exitCode=1 Jan 30 12:22:13 crc kubenswrapper[4869]: I0130 12:22:13.311811 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" event={"ID":"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88","Type":"ContainerDied","Data":"69bb080d877f50067da8fc1d036c45afdd8f0d090c802b3198e42467421baa9f"} Jan 30 12:22:13 crc kubenswrapper[4869]: I0130 12:22:13.311876 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" event={"ID":"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88","Type":"ContainerStarted","Data":"9a30f2ec80674cec8eb67cdf16e9dfef0086198eebf63ded48e131840d518ac7"} Jan 30 12:22:13 crc kubenswrapper[4869]: I0130 12:22:13.350452 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kzp9s/crc-debug-8s9lk"] Jan 30 12:22:13 crc kubenswrapper[4869]: I0130 12:22:13.359045 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kzp9s/crc-debug-8s9lk"] Jan 30 12:22:14 crc kubenswrapper[4869]: I0130 12:22:14.395868 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:14 crc kubenswrapper[4869]: I0130 12:22:14.516995 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-host\") pod \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " Jan 30 12:22:14 crc kubenswrapper[4869]: I0130 12:22:14.517157 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md7cn\" (UniqueName: \"kubernetes.io/projected/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-kube-api-access-md7cn\") pod \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\" (UID: \"3d033dfd-b6e4-48bd-8142-9b16cf4c9b88\") " Jan 30 12:22:14 crc kubenswrapper[4869]: I0130 12:22:14.517186 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-host" (OuterVolumeSpecName: "host") pod "3d033dfd-b6e4-48bd-8142-9b16cf4c9b88" (UID: "3d033dfd-b6e4-48bd-8142-9b16cf4c9b88"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 12:22:14 crc kubenswrapper[4869]: I0130 12:22:14.522588 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-kube-api-access-md7cn" (OuterVolumeSpecName: "kube-api-access-md7cn") pod "3d033dfd-b6e4-48bd-8142-9b16cf4c9b88" (UID: "3d033dfd-b6e4-48bd-8142-9b16cf4c9b88"). 
InnerVolumeSpecName "kube-api-access-md7cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:22:14 crc kubenswrapper[4869]: I0130 12:22:14.618963 4869 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-host\") on node \"crc\" DevicePath \"\"" Jan 30 12:22:14 crc kubenswrapper[4869]: I0130 12:22:14.619006 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md7cn\" (UniqueName: \"kubernetes.io/projected/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88-kube-api-access-md7cn\") on node \"crc\" DevicePath \"\"" Jan 30 12:22:15 crc kubenswrapper[4869]: I0130 12:22:15.326865 4869 scope.go:117] "RemoveContainer" containerID="69bb080d877f50067da8fc1d036c45afdd8f0d090c802b3198e42467421baa9f" Jan 30 12:22:15 crc kubenswrapper[4869]: I0130 12:22:15.326903 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kzp9s/crc-debug-8s9lk" Jan 30 12:22:16 crc kubenswrapper[4869]: I0130 12:22:16.141076 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d033dfd-b6e4-48bd-8142-9b16cf4c9b88" path="/var/lib/kubelet/pods/3d033dfd-b6e4-48bd-8142-9b16cf4c9b88/volumes" Jan 30 12:22:21 crc kubenswrapper[4869]: I0130 12:22:21.769660 4869 patch_prober.go:28] interesting pod/machine-config-daemon-99lr2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 12:22:21 crc kubenswrapper[4869]: I0130 12:22:21.770475 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 12:22:21 crc kubenswrapper[4869]: I0130 12:22:21.770537 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" Jan 30 12:22:21 crc kubenswrapper[4869]: I0130 12:22:21.771293 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6"} pod="openshift-machine-config-operator/machine-config-daemon-99lr2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 12:22:21 crc kubenswrapper[4869]: I0130 12:22:21.771364 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" containerName="machine-config-daemon" containerID="cri-o://eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" gracePeriod=600 Jan 30 12:22:21 crc kubenswrapper[4869]: E0130 12:22:21.904489 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:22:22 crc 
kubenswrapper[4869]: I0130 12:22:22.401257 4869 generic.go:334] "Generic (PLEG): container finished" podID="ef13186b-7f82-4025-97e3-d899be8c207f" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" exitCode=0 Jan 30 12:22:22 crc kubenswrapper[4869]: I0130 12:22:22.401566 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerDied","Data":"eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6"} Jan 30 12:22:22 crc kubenswrapper[4869]: I0130 12:22:22.401685 4869 scope.go:117] "RemoveContainer" containerID="2753fcbf560309b5b7e37904d0d2cf7f1caef840cef8861e52eecbabf9d52a12" Jan 30 12:22:22 crc kubenswrapper[4869]: I0130 12:22:22.402704 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:22:22 crc kubenswrapper[4869]: E0130 12:22:22.403065 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:22:28 crc kubenswrapper[4869]: I0130 12:22:28.336874 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67b648cdf7-q9tdl_b8fb3786-2ed0-4f33-8528-8c86b8a69c87/init/0.log" Jan 30 12:22:28 crc kubenswrapper[4869]: I0130 12:22:28.528442 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67b648cdf7-q9tdl_b8fb3786-2ed0-4f33-8528-8c86b8a69c87/init/0.log" Jan 30 12:22:28 crc kubenswrapper[4869]: I0130 12:22:28.550011 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67b648cdf7-q9tdl_b8fb3786-2ed0-4f33-8528-8c86b8a69c87/dnsmasq-dns/0.log" Jan 30 12:22:28 crc kubenswrapper[4869]: I0130 12:22:28.741082 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-9a6e-account-create-update-fsr8d_5db9e4f5-3a4b-4021-b569-db288f1501f0/mariadb-account-create-update/0.log" Jan 30 12:22:28 crc kubenswrapper[4869]: I0130 12:22:28.798364 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-bb57f64fd-4jsjt_3e14fe48-4006-43c6-ad0c-18261c5cc38e/keystone-api/0.log" Jan 30 12:22:28 crc kubenswrapper[4869]: I0130 12:22:28.942329 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-bootstrap-dcp9h_787f8551-8c15-4948-802e-6f768a0eae9f/keystone-bootstrap/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.026151 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-create-skkxw_9bf3ebb4-ddb0-4221-a918-11657d547507/mariadb-database-create/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.156385 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-sync-sh22r_9b06a4bb-d363-4877-a91a-e42d56568285/keystone-db-sync/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.229240 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_776504e9-6afb-4bd8-bab6-12aabf4b81f0/adoption/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.481003 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_3db80712-bc4e-4418-873f-de37b9970b48/mysql-bootstrap/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.689990 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3db80712-bc4e-4418-873f-de37b9970b48/mysql-bootstrap/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.692189 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3db80712-bc4e-4418-873f-de37b9970b48/galera/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.898118 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2d6f92ae-3351-43f9-ae75-ab887fdf402e/mysql-bootstrap/0.log" Jan 30 12:22:29 crc kubenswrapper[4869]: I0130 12:22:29.926501 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_6248329a-abb5-42cd-b358-9fa425bfb39b/memcached/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.053866 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2d6f92ae-3351-43f9-ae75-ab887fdf402e/mysql-bootstrap/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.071311 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2d6f92ae-3351-43f9-ae75-ab887fdf402e/galera/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.131941 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_50c7288f-ea48-4c27-b910-19c2a8dec150/openstackclient/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.259745 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_32f585a7-6cc0-4ea3-aad6-1fd82d3a0358/adoption/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.379900 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1dcb5f54-fb8c-453a-a320-a6504f9fa441/openstack-network-exporter/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.445584 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1dcb5f54-fb8c-453a-a320-a6504f9fa441/ovn-northd/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.563399 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d68922d2-7bd8-45a4-94ca-742713db6ceb/ovsdbserver-nb/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.665751 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d68922d2-7bd8-45a4-94ca-742713db6ceb/openstack-network-exporter/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.729066 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_3090f286-0536-4b7a-ae11-f9fff3403717/openstack-network-exporter/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.832250 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_3090f286-0536-4b7a-ae11-f9fff3403717/ovsdbserver-nb/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.909259 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_6ca04c49-f111-4cf1-a9aa-5da441c05b28/ovsdbserver-nb/0.log" Jan 30 12:22:30 crc kubenswrapper[4869]: I0130 12:22:30.940871 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_6ca04c49-f111-4cf1-a9aa-5da441c05b28/openstack-network-exporter/0.log" Jan 
30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.093142 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ec554cf4-4bb3-4861-9763-1d754d0f2c2e/ovsdbserver-sb/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.094876 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ec554cf4-4bb3-4861-9763-1d754d0f2c2e/openstack-network-exporter/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.281082 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_85a8c097-912b-4208-bcbf-606581cfba77/openstack-network-exporter/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.285590 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_85a8c097-912b-4208-bcbf-606581cfba77/ovsdbserver-sb/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.349034 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_37b2cba6-4508-4425-af92-7514674301c4/openstack-network-exporter/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.481146 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_37b2cba6-4508-4425-af92-7514674301c4/ovsdbserver-sb/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.534198 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8c74a886-5483-43ae-a293-00cf837302c6/setup-container/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.727356 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8c74a886-5483-43ae-a293-00cf837302c6/setup-container/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.761490 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8c74a886-5483-43ae-a293-00cf837302c6/rabbitmq/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.829827 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0f975286-9c86-4dd2-a1df-170254db9def/setup-container/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.953912 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0f975286-9c86-4dd2-a1df-170254db9def/setup-container/0.log" Jan 30 12:22:31 crc kubenswrapper[4869]: I0130 12:22:31.974357 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0f975286-9c86-4dd2-a1df-170254db9def/rabbitmq/0.log" Jan 30 12:22:37 crc kubenswrapper[4869]: I0130 12:22:37.133396 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:22:37 crc kubenswrapper[4869]: E0130 12:22:37.134243 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.023256 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb_56568718-0940-4bb5-be9e-842cef4a2cdb/util/0.log" Jan 30 12:22:47 
crc kubenswrapper[4869]: I0130 12:22:47.248039 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb_56568718-0940-4bb5-be9e-842cef4a2cdb/util/0.log" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.286271 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb_56568718-0940-4bb5-be9e-842cef4a2cdb/pull/0.log" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.307200 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb_56568718-0940-4bb5-be9e-842cef4a2cdb/pull/0.log" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.485015 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb_56568718-0940-4bb5-be9e-842cef4a2cdb/util/0.log" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.494269 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb_56568718-0940-4bb5-be9e-842cef4a2cdb/pull/0.log" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.497970 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b462b3f6abce858542c39e35c39537eb64744c29aea262ea8d08bbe467tp2wb_56568718-0940-4bb5-be9e-842cef4a2cdb/extract/0.log" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.742486 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5f9bbdc844-56ngn_f0d6cb91-dba9-4395-9438-8ab72ea16207/manager/0.log" Jan 30 12:22:47 crc kubenswrapper[4869]: I0130 12:22:47.772653 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-566c8844c5-llwtw_88221c24-f744-4a85-9f3e-cede7b0a4f67/manager/0.log" Jan 30 12:22:48 crc kubenswrapper[4869]: I0130 12:22:48.106171 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-8f4c5cb64-wgddf_4eb7653e-3fa6-4479-a1de-66d77e70b60c/manager/0.log" Jan 30 12:22:48 crc kubenswrapper[4869]: I0130 12:22:48.207765 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-784f59d4f4-hgkkg_818c8e4f-f344-4544-a562-35fd8865bdb9/manager/0.log" Jan 30 12:22:48 crc kubenswrapper[4869]: I0130 12:22:48.323403 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54985f5875-6m8mq_edcec497-d8a5-4cc4-b966-90bda3727925/manager/0.log" Jan 30 12:22:48 crc kubenswrapper[4869]: I0130 12:22:48.417835 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-q2f9n_68a4d7f9-c03f-4552-8571-344434546d04/manager/0.log" Jan 30 12:22:48 crc kubenswrapper[4869]: I0130 12:22:48.694186 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6fd9bbb6f6-ql2pg_e9e042ed-4c1e-430b-bb7e-fda28cbef607/manager/0.log" Jan 30 12:22:48 crc kubenswrapper[4869]: I0130 12:22:48.859092 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-ghcvm_11587507-3c83-42d5-af04-3e352e7c7689/manager/0.log" Jan 30 12:22:48 crc kubenswrapper[4869]: I0130 12:22:48.958527 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-6c9d56f9bd-n5pbt_495b2a0c-3cb3-4a22-8609-eb786d6c693f/manager/0.log" Jan 30 12:22:49 crc kubenswrapper[4869]: I0130 12:22:49.014737 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-74954f9f78-flsxj_0a3ad98c-dec5-417c-890a-227fcab3d149/manager/0.log" Jan 30 12:22:49 crc kubenswrapper[4869]: I0130 12:22:49.189581 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-22kgf_484fb11b-1e2c-40c8-944d-a34a6fbaed79/manager/0.log" Jan 30 12:22:49 crc kubenswrapper[4869]: I0130 12:22:49.234166 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6cfc4f6754-gcvf8_22150fc4-3e93-45fd-9301-f7b552f57f48/manager/0.log" Jan 30 12:22:49 crc kubenswrapper[4869]: I0130 12:22:49.433621 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-694c6dcf95-m4d9d_757e291c-f7c9-4b61-9ed8-5e78c4ffe989/manager/0.log" Jan 30 12:22:49 crc kubenswrapper[4869]: I0130 12:22:49.446167 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-67f5956bc9-vs89l_ab8e3667-4a4c-47df-b46c-1d3d5a315fe0/manager/0.log" Jan 30 12:22:49 crc kubenswrapper[4869]: I0130 12:22:49.634729 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4dsnw8j_28c859ed-db94-494c-afa3-c1cb96425ac5/manager/0.log" Jan 30 12:22:49 crc kubenswrapper[4869]: I0130 12:22:49.927036 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-595d55cff7-wpkm2_d1cf852a-16c5-4391-a885-c91bf7cb80e3/operator/0.log" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.043548 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-zz5jk_3d6e0112-9414-4306-965b-721b67035025/registry-server/0.log" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.137505 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:22:50 crc kubenswrapper[4869]: E0130 12:22:50.137760 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.197009 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-lvrcp_50843920-ef36-4230-8840-0d34b70f602b/manager/0.log" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.445128 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-tk8kg_61332cae-942e-475a-85b9-2020908d8266/manager/0.log" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.531492 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-bxjxm_1361000f-8ad7-4e93-b7cc-c059e5ba6641/operator/0.log" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.670086 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5f57b98d7d-srxcx_e5597fb4-0c80-4868-ae07-b38449e7a4af/manager/0.log" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.779972 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7d4f9d9c9b-8hsst_6e096fd6-f27e-4561-a86b-8991d11d82e7/manager/0.log" Jan 30 12:22:50 crc kubenswrapper[4869]: I0130 12:22:50.949676 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cd99594-rjwgj_64399a12-4f5e-42e8-bc4b-b80347287a30/manager/0.log" Jan 30 12:22:51 crc kubenswrapper[4869]: I0130 12:22:51.079497 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-28mvf_79e9a1e6-68d5-422a-9446-0d4f106f5f22/manager/0.log" Jan 30 12:22:51 crc kubenswrapper[4869]: I0130 12:22:51.180693 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5bf648c946-89m4w_fffb7d27-e4fa-4ba1-8a0b-cc6e5d18bd29/manager/0.log" Jan 30 12:23:04 crc kubenswrapper[4869]: I0130 12:23:04.133808 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:23:04 crc kubenswrapper[4869]: E0130 12:23:04.134796 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:23:11 crc kubenswrapper[4869]: I0130 12:23:11.051220 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-xr2sj_8a9ab7f0-7df0-452e-a879-3a7344a1778f/control-plane-machine-set-operator/0.log" Jan 30 12:23:11 crc kubenswrapper[4869]: I0130 12:23:11.311944 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-cdspc_04600a3e-ea6a-4828-bf49-4f97a92f2f4d/machine-api-operator/0.log" Jan 30 12:23:11 crc kubenswrapper[4869]: I0130 12:23:11.323185 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-cdspc_04600a3e-ea6a-4828-bf49-4f97a92f2f4d/kube-rbac-proxy/0.log" Jan 30 12:23:16 crc kubenswrapper[4869]: I0130 12:23:16.185129 4869 scope.go:117] "RemoveContainer" containerID="cb67bf96b90e0088f5ec0c897d9916ea8c6f143b8f35ff492bdf1b76eba2613b" Jan 30 12:23:17 crc kubenswrapper[4869]: I0130 12:23:17.134193 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:23:17 crc kubenswrapper[4869]: E0130 12:23:17.134701 4869 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.375391 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fczbl"] Jan 30 12:23:24 crc kubenswrapper[4869]: E0130 12:23:24.376453 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d033dfd-b6e4-48bd-8142-9b16cf4c9b88" containerName="container-00" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.376468 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d033dfd-b6e4-48bd-8142-9b16cf4c9b88" containerName="container-00" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.376628 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d033dfd-b6e4-48bd-8142-9b16cf4c9b88" containerName="container-00" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.377842 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.392272 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fczbl"] Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.516154 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-catalog-content\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.516565 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-utilities\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.516592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vrzh\" (UniqueName: \"kubernetes.io/projected/88cb91b0-6772-433c-bf6f-ef2e850c448e-kube-api-access-4vrzh\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.618326 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-catalog-content\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.618383 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-utilities\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " 
pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.618404 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vrzh\" (UniqueName: \"kubernetes.io/projected/88cb91b0-6772-433c-bf6f-ef2e850c448e-kube-api-access-4vrzh\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.619296 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-catalog-content\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.619590 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-utilities\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.622314 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-545d4d4674-wpxsv_a3620bab-0584-4515-aeda-9aac66fc26d1/cert-manager-controller/0.log" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.641323 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vrzh\" (UniqueName: \"kubernetes.io/projected/88cb91b0-6772-433c-bf6f-ef2e850c448e-kube-api-access-4vrzh\") pod \"certified-operators-fczbl\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:24 crc kubenswrapper[4869]: I0130 12:23:24.721576 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:25 crc kubenswrapper[4869]: I0130 12:23:25.086170 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-5545bd876-hncd5_d400b3b0-9a2e-4034-8215-c6007a68665a/cert-manager-cainjector/0.log" Jan 30 12:23:25 crc kubenswrapper[4869]: I0130 12:23:25.091256 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-6888856db4-m4p9s_e7ef7489-3636-49d3-8cc0-a56c617dc974/cert-manager-webhook/0.log" Jan 30 12:23:25 crc kubenswrapper[4869]: I0130 12:23:25.271955 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fczbl"] Jan 30 12:23:25 crc kubenswrapper[4869]: I0130 12:23:25.942270 4869 generic.go:334] "Generic (PLEG): container finished" podID="88cb91b0-6772-433c-bf6f-ef2e850c448e" containerID="6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0" exitCode=0 Jan 30 12:23:25 crc kubenswrapper[4869]: I0130 12:23:25.942367 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fczbl" event={"ID":"88cb91b0-6772-433c-bf6f-ef2e850c448e","Type":"ContainerDied","Data":"6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0"} Jan 30 12:23:25 crc kubenswrapper[4869]: I0130 12:23:25.942638 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fczbl" event={"ID":"88cb91b0-6772-433c-bf6f-ef2e850c448e","Type":"ContainerStarted","Data":"579bda564fcb96ff1cb6392afa734d693107c0002126dba260fe72f2ef94ca4d"} Jan 30 12:23:26 crc kubenswrapper[4869]: I0130 12:23:26.954208 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fczbl" event={"ID":"88cb91b0-6772-433c-bf6f-ef2e850c448e","Type":"ContainerStarted","Data":"e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b"} Jan 30 12:23:27 crc kubenswrapper[4869]: I0130 12:23:27.963939 4869 generic.go:334] "Generic (PLEG): container finished" podID="88cb91b0-6772-433c-bf6f-ef2e850c448e" containerID="e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b" exitCode=0 Jan 30 12:23:27 crc kubenswrapper[4869]: I0130 12:23:27.964025 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fczbl" event={"ID":"88cb91b0-6772-433c-bf6f-ef2e850c448e","Type":"ContainerDied","Data":"e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b"} Jan 30 12:23:28 crc kubenswrapper[4869]: I0130 12:23:28.136466 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:23:28 crc kubenswrapper[4869]: E0130 12:23:28.136723 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:23:28 crc kubenswrapper[4869]: I0130 12:23:28.974383 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fczbl" event={"ID":"88cb91b0-6772-433c-bf6f-ef2e850c448e","Type":"ContainerStarted","Data":"83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd"} Jan 30 
12:23:28 crc kubenswrapper[4869]: I0130 12:23:28.999008 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fczbl" podStartSLOduration=2.619010952 podStartE2EDuration="4.998979414s" podCreationTimestamp="2026-01-30 12:23:24 +0000 UTC" firstStartedPulling="2026-01-30 12:23:25.943825749 +0000 UTC m=+5356.493701815" lastFinishedPulling="2026-01-30 12:23:28.323794211 +0000 UTC m=+5358.873670277" observedRunningTime="2026-01-30 12:23:28.994394885 +0000 UTC m=+5359.544270951" watchObservedRunningTime="2026-01-30 12:23:28.998979414 +0000 UTC m=+5359.548855490"
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.158373 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6wgxd"]
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.161215 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.172442 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6wgxd"]
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.273080 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-utilities\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.273200 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-catalog-content\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.273450 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8f7z\" (UniqueName: \"kubernetes.io/projected/af961768-4f28-48fa-98e3-b0219133a41a-kube-api-access-j8f7z\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.376809 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-utilities\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.376944 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-catalog-content\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd"
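The "Observed pod startup duration" entry above encodes a small calculation: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that value minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Re-deriving both numbers from the logged timestamps, as a sketch (the Go layout string is an assumption that matches the logged format):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Layout matching the timestamps as printed in the entry above.
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        parse := func(s string) time.Time {
            t, err := time.Parse(layout, s)
            if err != nil {
                panic(err)
            }
            return t
        }
        created := parse("2026-01-30 12:23:24 +0000 UTC")
        firstPull := parse("2026-01-30 12:23:25.943825749 +0000 UTC")
        lastPull := parse("2026-01-30 12:23:28.323794211 +0000 UTC")
        running := parse("2026-01-30 12:23:28.998979414 +0000 UTC")

        e2e := running.Sub(created)        // podStartE2EDuration: 4.998979414s
        pulling := lastPull.Sub(firstPull) // image-pull window:   2.379968462s
        fmt.Println(e2e, e2e-pulling)      // 4.998979414s 2.619010952s
    }

4.998979414s minus 2.379968462s is 2.619010952s, matching the logged podStartSLOduration: the SLO figure deliberately excludes time spent pulling images.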
pod="openshift-marketplace/redhat-operators-6wgxd" Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.377732 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-catalog-content\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd" Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.377692 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-utilities\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd" Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.402679 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8f7z\" (UniqueName: \"kubernetes.io/projected/af961768-4f28-48fa-98e3-b0219133a41a-kube-api-access-j8f7z\") pod \"redhat-operators-6wgxd\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " pod="openshift-marketplace/redhat-operators-6wgxd" Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.485830 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wgxd" Jan 30 12:23:32 crc kubenswrapper[4869]: I0130 12:23:32.965238 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6wgxd"] Jan 30 12:23:33 crc kubenswrapper[4869]: I0130 12:23:33.012538 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wgxd" event={"ID":"af961768-4f28-48fa-98e3-b0219133a41a","Type":"ContainerStarted","Data":"44b6712fd92771f263e12d7dd14291fd3fb782d886647f82c48abb4923d9fb24"} Jan 30 12:23:34 crc kubenswrapper[4869]: I0130 12:23:34.031218 4869 generic.go:334] "Generic (PLEG): container finished" podID="af961768-4f28-48fa-98e3-b0219133a41a" containerID="deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c" exitCode=0 Jan 30 12:23:34 crc kubenswrapper[4869]: I0130 12:23:34.031326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wgxd" event={"ID":"af961768-4f28-48fa-98e3-b0219133a41a","Type":"ContainerDied","Data":"deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c"} Jan 30 12:23:34 crc kubenswrapper[4869]: I0130 12:23:34.722064 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:34 crc kubenswrapper[4869]: I0130 12:23:34.722431 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:34 crc kubenswrapper[4869]: I0130 12:23:34.768839 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:35 crc kubenswrapper[4869]: I0130 12:23:35.092034 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:36 crc kubenswrapper[4869]: I0130 12:23:36.048506 4869 generic.go:334] "Generic (PLEG): container finished" podID="af961768-4f28-48fa-98e3-b0219133a41a" containerID="05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6" exitCode=0 Jan 30 12:23:36 crc kubenswrapper[4869]: I0130 12:23:36.048623 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wgxd" event={"ID":"af961768-4f28-48fa-98e3-b0219133a41a","Type":"ContainerDied","Data":"05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6"} Jan 30 12:23:36 crc kubenswrapper[4869]: I0130 12:23:36.548518 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fczbl"] Jan 30 12:23:37 crc kubenswrapper[4869]: I0130 12:23:37.058845 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wgxd" event={"ID":"af961768-4f28-48fa-98e3-b0219133a41a","Type":"ContainerStarted","Data":"1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1"} Jan 30 12:23:37 crc kubenswrapper[4869]: I0130 12:23:37.081136 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6wgxd" podStartSLOduration=2.4454550729999998 podStartE2EDuration="5.081112286s" podCreationTimestamp="2026-01-30 12:23:32 +0000 UTC" firstStartedPulling="2026-01-30 12:23:34.032933468 +0000 UTC m=+5364.582809534" lastFinishedPulling="2026-01-30 12:23:36.668590681 +0000 UTC m=+5367.218466747" observedRunningTime="2026-01-30 12:23:37.076239718 +0000 UTC m=+5367.626115794" watchObservedRunningTime="2026-01-30 12:23:37.081112286 +0000 UTC m=+5367.630988352" Jan 30 12:23:37 crc kubenswrapper[4869]: I0130 12:23:37.920201 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-vpv9w_0fb8bb1d-8d05-4b95-a466-40fabf706e11/nmstate-console-plugin/0.log" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.065559 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fczbl" podUID="88cb91b0-6772-433c-bf6f-ef2e850c448e" containerName="registry-server" containerID="cri-o://83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd" gracePeriod=2 Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.166308 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-jd8kn_ab78e821-7f8a-43c5-a857-3694754330a1/nmstate-handler/0.log" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.275625 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-6rnll_2b36143a-20b8-40d9-a94f-ba14118e00bc/kube-rbac-proxy/0.log" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.354216 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-6rnll_2b36143a-20b8-40d9-a94f-ba14118e00bc/nmstate-metrics/0.log" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.553736 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-qx2rr_89dc09e0-cece-4665-b6cf-5cd9fa7ea314/nmstate-operator/0.log" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.560405 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.689742 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-catalog-content\") pod \"88cb91b0-6772-433c-bf6f-ef2e850c448e\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.689829 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vrzh\" (UniqueName: \"kubernetes.io/projected/88cb91b0-6772-433c-bf6f-ef2e850c448e-kube-api-access-4vrzh\") pod \"88cb91b0-6772-433c-bf6f-ef2e850c448e\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.689902 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-utilities\") pod \"88cb91b0-6772-433c-bf6f-ef2e850c448e\" (UID: \"88cb91b0-6772-433c-bf6f-ef2e850c448e\") " Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.690975 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-utilities" (OuterVolumeSpecName: "utilities") pod "88cb91b0-6772-433c-bf6f-ef2e850c448e" (UID: "88cb91b0-6772-433c-bf6f-ef2e850c448e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.700499 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88cb91b0-6772-433c-bf6f-ef2e850c448e-kube-api-access-4vrzh" (OuterVolumeSpecName: "kube-api-access-4vrzh") pod "88cb91b0-6772-433c-bf6f-ef2e850c448e" (UID: "88cb91b0-6772-433c-bf6f-ef2e850c448e"). InnerVolumeSpecName "kube-api-access-4vrzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.743550 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88cb91b0-6772-433c-bf6f-ef2e850c448e" (UID: "88cb91b0-6772-433c-bf6f-ef2e850c448e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.791465 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.791801 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vrzh\" (UniqueName: \"kubernetes.io/projected/88cb91b0-6772-433c-bf6f-ef2e850c448e-kube-api-access-4vrzh\") on node \"crc\" DevicePath \"\"" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.791913 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88cb91b0-6772-433c-bf6f-ef2e850c448e-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:23:38 crc kubenswrapper[4869]: I0130 12:23:38.795133 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-7n7pb_d6be4693-290f-45f6-8783-4e28ab1e4578/nmstate-webhook/0.log" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.074429 4869 generic.go:334] "Generic (PLEG): container finished" podID="88cb91b0-6772-433c-bf6f-ef2e850c448e" containerID="83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd" exitCode=0 Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.074478 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fczbl" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.074490 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fczbl" event={"ID":"88cb91b0-6772-433c-bf6f-ef2e850c448e","Type":"ContainerDied","Data":"83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd"} Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.075231 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fczbl" event={"ID":"88cb91b0-6772-433c-bf6f-ef2e850c448e","Type":"ContainerDied","Data":"579bda564fcb96ff1cb6392afa734d693107c0002126dba260fe72f2ef94ca4d"} Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.075255 4869 scope.go:117] "RemoveContainer" containerID="83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.102086 4869 scope.go:117] "RemoveContainer" containerID="e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.105009 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fczbl"] Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.112644 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fczbl"] Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.128289 4869 scope.go:117] "RemoveContainer" containerID="6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.168763 4869 scope.go:117] "RemoveContainer" containerID="83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd" Jan 30 12:23:39 crc kubenswrapper[4869]: E0130 12:23:39.169429 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd\": container with ID starting with 
83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd not found: ID does not exist" containerID="83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.169474 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd"} err="failed to get container status \"83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd\": rpc error: code = NotFound desc = could not find container \"83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd\": container with ID starting with 83c800b6e05c53bdaff5c4aae49b478a9125cb6975ef3dcdc1b4e027c53907fd not found: ID does not exist" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.169507 4869 scope.go:117] "RemoveContainer" containerID="e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b" Jan 30 12:23:39 crc kubenswrapper[4869]: E0130 12:23:39.169851 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b\": container with ID starting with e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b not found: ID does not exist" containerID="e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.169964 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b"} err="failed to get container status \"e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b\": rpc error: code = NotFound desc = could not find container \"e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b\": container with ID starting with e14cf9bf1480be42eec84570fd93a826871825f35e90175067ef6d4aaf66147b not found: ID does not exist" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.170056 4869 scope.go:117] "RemoveContainer" containerID="6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0" Jan 30 12:23:39 crc kubenswrapper[4869]: E0130 12:23:39.170342 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0\": container with ID starting with 6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0 not found: ID does not exist" containerID="6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0" Jan 30 12:23:39 crc kubenswrapper[4869]: I0130 12:23:39.170368 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0"} err="failed to get container status \"6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0\": rpc error: code = NotFound desc = could not find container \"6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0\": container with ID starting with 6cfbc43446f458ab3b20b75301e684edc2eb034e6bb9432de46790791f49ccd0 not found: ID does not exist" Jan 30 12:23:40 crc kubenswrapper[4869]: I0130 12:23:40.143814 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88cb91b0-6772-433c-bf6f-ef2e850c448e" path="/var/lib/kubelet/pods/88cb91b0-6772-433c-bf6f-ef2e850c448e/volumes" Jan 30 12:23:42 crc kubenswrapper[4869]: I0130 12:23:42.487017 
4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:42 crc kubenswrapper[4869]: I0130 12:23:42.487673 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:43 crc kubenswrapper[4869]: I0130 12:23:43.133742 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6"
Jan 30 12:23:43 crc kubenswrapper[4869]: E0130 12:23:43.134011 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:23:43 crc kubenswrapper[4869]: I0130 12:23:43.539472 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6wgxd" podUID="af961768-4f28-48fa-98e3-b0219133a41a" containerName="registry-server" probeResult="failure" output=<
Jan 30 12:23:43 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s
Jan 30 12:23:43 crc kubenswrapper[4869]: >
Jan 30 12:23:52 crc kubenswrapper[4869]: I0130 12:23:52.543093 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:52 crc kubenswrapper[4869]: I0130 12:23:52.595190 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6wgxd"
Jan 30 12:23:52 crc kubenswrapper[4869]: I0130 12:23:52.781039 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6wgxd"]
Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.210772 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6wgxd" podUID="af961768-4f28-48fa-98e3-b0219133a41a" containerName="registry-server" containerID="cri-o://1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1" gracePeriod=2
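The startup-probe failure recorded above ("timeout: failed to connect service \":50051\" within 1s") is a connection attempt against the registry-server's gRPC port with a one-second deadline; the pod stays unready until the catalog container answers on :50051, which it does by 12:23:52. A rough Go approximation of the timeout behaviour using a plain TCP dial (the real probe is a gRPC health check, so treat the address and mechanism here as illustrative):

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    func main() {
        // One attempt of the 1s-deadline connect the startup probe performs.
        conn, err := net.DialTimeout("tcp", "localhost:50051", time.Second)
        if err != nil {
            // Mirrors the failure output captured in the log above.
            fmt.Printf("timeout: failed to connect service %q within 1s\n", ":50051")
            return
        }
        conn.Close()
        fmt.Println("probe ok; kubelet marks the container started")
    }

A failed startup probe is not fatal by itself: the kubelet simply retries on the probe period until the failure threshold is exceeded, which is why a single failure at 12:23:43 is followed by status="started" nine seconds later.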
Need to start a new one" pod="openshift-marketplace/redhat-operators-6wgxd" Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.752888 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-catalog-content\") pod \"af961768-4f28-48fa-98e3-b0219133a41a\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.753015 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8f7z\" (UniqueName: \"kubernetes.io/projected/af961768-4f28-48fa-98e3-b0219133a41a-kube-api-access-j8f7z\") pod \"af961768-4f28-48fa-98e3-b0219133a41a\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.753073 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-utilities\") pod \"af961768-4f28-48fa-98e3-b0219133a41a\" (UID: \"af961768-4f28-48fa-98e3-b0219133a41a\") " Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.754107 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-utilities" (OuterVolumeSpecName: "utilities") pod "af961768-4f28-48fa-98e3-b0219133a41a" (UID: "af961768-4f28-48fa-98e3-b0219133a41a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.759655 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af961768-4f28-48fa-98e3-b0219133a41a-kube-api-access-j8f7z" (OuterVolumeSpecName: "kube-api-access-j8f7z") pod "af961768-4f28-48fa-98e3-b0219133a41a" (UID: "af961768-4f28-48fa-98e3-b0219133a41a"). InnerVolumeSpecName "kube-api-access-j8f7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.854524 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8f7z\" (UniqueName: \"kubernetes.io/projected/af961768-4f28-48fa-98e3-b0219133a41a-kube-api-access-j8f7z\") on node \"crc\" DevicePath \"\"" Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.854569 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.880721 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af961768-4f28-48fa-98e3-b0219133a41a" (UID: "af961768-4f28-48fa-98e3-b0219133a41a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:23:54 crc kubenswrapper[4869]: I0130 12:23:54.956247 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af961768-4f28-48fa-98e3-b0219133a41a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.220610 4869 generic.go:334] "Generic (PLEG): container finished" podID="af961768-4f28-48fa-98e3-b0219133a41a" containerID="1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1" exitCode=0 Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.220683 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wgxd" Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.220724 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wgxd" event={"ID":"af961768-4f28-48fa-98e3-b0219133a41a","Type":"ContainerDied","Data":"1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1"} Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.220779 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wgxd" event={"ID":"af961768-4f28-48fa-98e3-b0219133a41a","Type":"ContainerDied","Data":"44b6712fd92771f263e12d7dd14291fd3fb782d886647f82c48abb4923d9fb24"} Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.220802 4869 scope.go:117] "RemoveContainer" containerID="1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1" Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.238550 4869 scope.go:117] "RemoveContainer" containerID="05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6" Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.268563 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6wgxd"] Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.276977 4869 scope.go:117] "RemoveContainer" containerID="deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c" Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.281978 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6wgxd"] Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.299988 4869 scope.go:117] "RemoveContainer" containerID="1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1" Jan 30 12:23:55 crc kubenswrapper[4869]: E0130 12:23:55.300451 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1\": container with ID starting with 1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1 not found: ID does not exist" containerID="1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1" Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.300491 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1"} err="failed to get container status \"1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1\": rpc error: code = NotFound desc = could not find container \"1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1\": container with ID starting with 1cf9ceaa3dc63d9adb4a56f1d55dffea780b5a23bc678f409d0c0872765c79d1 not found: ID does not exist" Jan 30 12:23:55 crc 
kubenswrapper[4869]: I0130 12:23:55.300537 4869 scope.go:117] "RemoveContainer" containerID="05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6"
Jan 30 12:23:55 crc kubenswrapper[4869]: E0130 12:23:55.301005 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6\": container with ID starting with 05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6 not found: ID does not exist" containerID="05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6"
Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.301025 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6"} err="failed to get container status \"05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6\": rpc error: code = NotFound desc = could not find container \"05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6\": container with ID starting with 05e3d4bde39c8261c6b8401db3a4917e33a24da753f6f666ead4815e9669c1d6 not found: ID does not exist"
Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.301040 4869 scope.go:117] "RemoveContainer" containerID="deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c"
Jan 30 12:23:55 crc kubenswrapper[4869]: E0130 12:23:55.301247 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c\": container with ID starting with deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c not found: ID does not exist" containerID="deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c"
Jan 30 12:23:55 crc kubenswrapper[4869]: I0130 12:23:55.301283 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c"} err="failed to get container status \"deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c\": rpc error: code = NotFound desc = could not find container \"deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c\": container with ID starting with deba7d7c335a5dbe9684e4e9026755fef7d2563ede5de72c391d669964f2e83c not found: ID does not exist"
Jan 30 12:23:56 crc kubenswrapper[4869]: I0130 12:23:56.133213 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6"
Jan 30 12:23:56 crc kubenswrapper[4869]: E0130 12:23:56.133595 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f"
Jan 30 12:23:56 crc kubenswrapper[4869]: I0130 12:23:56.175426 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af961768-4f28-48fa-98e3-b0219133a41a" path="/var/lib/kubelet/pods/af961768-4f28-48fa-98e3-b0219133a41a/volumes"
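The paired "ContainerStatus from runtime service failed" / "DeleteContainer returned error" entries above are benign: the kubelet re-issues RemoveContainer for container IDs that CRI-O has already deleted, receives a gRPC NotFound back, logs it, and carries on rather than failing the pod sync. A sketch of that tolerate-NotFound pattern using the standard grpc status helpers (the removeIgnoringNotFound wrapper is illustrative, not kubelet code):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeIgnoringNotFound treats NotFound from the runtime as success:
    // the container was already deleted by an earlier pass.
    func removeIgnoringNotFound(remove func(id string) error, id string) error {
        err := remove(id)
        if status.Code(err) == codes.NotFound {
            fmt.Printf("container %q already gone; ignoring\n", id)
            return nil
        }
        return err
    }

    func main() {
        // Stand-in for the CRI RemoveContainer call; always reports
        // NotFound, like the duplicate deletes in the log above.
        fake := func(id string) error {
            return status.Errorf(codes.NotFound, "could not find container %q: ID does not exist", id)
        }
        if err := removeIgnoringNotFound(fake, "83c800b6e05c"); err != nil {
            fmt.Println("remove failed:", err)
        }
    }

The race is inherent to the design: both the PLEG-driven cleanup and the API-driven pod deletion can ask for the same container's removal, so the second request must be idempotent.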
path="/var/log/pods/metallb-system_controller-6968d8fdc4-v8k8c_4879f7b2-b049-4a47-8d11-0868667299a6/kube-rbac-proxy/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.321473 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-frr-files/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.344596 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-v8k8c_4879f7b2-b049-4a47-8d11-0868667299a6/controller/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.595307 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-reloader/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.595325 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-frr-files/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.595924 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-reloader/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.620175 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-metrics/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.795178 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-frr-files/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.806927 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-metrics/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.812040 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-reloader/0.log" Jan 30 12:24:05 crc kubenswrapper[4869]: I0130 12:24:05.877886 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-metrics/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.049348 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-metrics/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.053770 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-frr-files/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.069937 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/cp-reloader/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.101079 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/controller/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.226052 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/kube-rbac-proxy/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.240764 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/frr-metrics/0.log" Jan 30 12:24:06 crc 
kubenswrapper[4869]: I0130 12:24:06.334213 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/kube-rbac-proxy-frr/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.470881 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/reloader/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.542263 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-fwqrp_50962bee-f856-4fd9-95f5-4b697b0212f2/frr-k8s-webhook-server/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.797169 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-b456d8f47-lgc55_0e2c40c0-e880-4c08-bb45-037f69a35fa9/manager/0.log" Jan 30 12:24:06 crc kubenswrapper[4869]: I0130 12:24:06.965140 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-67776d4d6b-k4bgw_e3c75ec9-dae6-418e-8ea0-3d0ab1c8d1a4/webhook-server/0.log" Jan 30 12:24:07 crc kubenswrapper[4869]: I0130 12:24:07.050162 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tbmhh_a8bd3be4-3c24-4856-a0bb-3efe3d9f716f/kube-rbac-proxy/0.log" Jan 30 12:24:07 crc kubenswrapper[4869]: I0130 12:24:07.766663 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tbmhh_a8bd3be4-3c24-4856-a0bb-3efe3d9f716f/speaker/0.log" Jan 30 12:24:08 crc kubenswrapper[4869]: I0130 12:24:08.003646 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-v6jfd_9a5abb53-8c8c-4293-a0ac-594d43f9b703/frr/0.log" Jan 30 12:24:10 crc kubenswrapper[4869]: I0130 12:24:10.137135 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:24:10 crc kubenswrapper[4869]: E0130 12:24:10.137564 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:24:16 crc kubenswrapper[4869]: I0130 12:24:16.236530 4869 scope.go:117] "RemoveContainer" containerID="5709ba87e38dbf851bee4108d3f12b5099b2ad44157a3a1e37d90670a644c0bb" Jan 30 12:24:16 crc kubenswrapper[4869]: I0130 12:24:16.257343 4869 scope.go:117] "RemoveContainer" containerID="ed8d9df2e746e12a06a489338c408cd79c4eb81166a0848d0632ebd192c565cf" Jan 30 12:24:16 crc kubenswrapper[4869]: I0130 12:24:16.293210 4869 scope.go:117] "RemoveContainer" containerID="bdabdc64213bb2dbb01c863ab4ef1f268441f2371549453363593e4f4b1cd2c8" Jan 30 12:24:20 crc kubenswrapper[4869]: I0130 12:24:20.387383 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t_d77c8e13-cf19-438b-9d62-575041c50699/util/0.log" Jan 30 12:24:20 crc kubenswrapper[4869]: I0130 12:24:20.544938 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t_d77c8e13-cf19-438b-9d62-575041c50699/util/0.log" Jan 30 12:24:20 crc 
kubenswrapper[4869]: I0130 12:24:20.579315 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t_d77c8e13-cf19-438b-9d62-575041c50699/pull/0.log" Jan 30 12:24:20 crc kubenswrapper[4869]: I0130 12:24:20.591392 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t_d77c8e13-cf19-438b-9d62-575041c50699/pull/0.log" Jan 30 12:24:20 crc kubenswrapper[4869]: I0130 12:24:20.774904 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t_d77c8e13-cf19-438b-9d62-575041c50699/util/0.log" Jan 30 12:24:20 crc kubenswrapper[4869]: I0130 12:24:20.781983 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t_d77c8e13-cf19-438b-9d62-575041c50699/pull/0.log" Jan 30 12:24:20 crc kubenswrapper[4869]: I0130 12:24:20.962231 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcps92t_d77c8e13-cf19-438b-9d62-575041c50699/extract/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.088088 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf_ed645f02-420a-49d0-8228-02df41ef2808/util/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.253093 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf_ed645f02-420a-49d0-8228-02df41ef2808/pull/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.287137 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf_ed645f02-420a-49d0-8228-02df41ef2808/pull/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.290227 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf_ed645f02-420a-49d0-8228-02df41ef2808/util/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.464207 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf_ed645f02-420a-49d0-8228-02df41ef2808/extract/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.464596 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf_ed645f02-420a-49d0-8228-02df41ef2808/pull/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.518773 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71328bqf_ed645f02-420a-49d0-8228-02df41ef2808/util/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.672964 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4_2743e133-1c70-48f2-aa22-6c80e628699d/util/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.871364 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4_2743e133-1c70-48f2-aa22-6c80e628699d/pull/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.894113 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4_2743e133-1c70-48f2-aa22-6c80e628699d/util/0.log" Jan 30 12:24:21 crc kubenswrapper[4869]: I0130 12:24:21.960510 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4_2743e133-1c70-48f2-aa22-6c80e628699d/pull/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.077462 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4_2743e133-1c70-48f2-aa22-6c80e628699d/util/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.136925 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4_2743e133-1c70-48f2-aa22-6c80e628699d/extract/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.153823 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5dnhn4_2743e133-1c70-48f2-aa22-6c80e628699d/pull/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.275686 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gkx59_b2b96b64-9445-48fe-bd76-e1a23f647129/extract-utilities/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.474528 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gkx59_b2b96b64-9445-48fe-bd76-e1a23f647129/extract-content/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.499463 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gkx59_b2b96b64-9445-48fe-bd76-e1a23f647129/extract-content/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.518449 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gkx59_b2b96b64-9445-48fe-bd76-e1a23f647129/extract-utilities/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.635573 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gkx59_b2b96b64-9445-48fe-bd76-e1a23f647129/extract-content/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.684922 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gkx59_b2b96b64-9445-48fe-bd76-e1a23f647129/extract-utilities/0.log" Jan 30 12:24:22 crc kubenswrapper[4869]: I0130 12:24:22.865151 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mf465_d787629b-df65-406a-8890-2a65d18ce8fd/extract-utilities/0.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.057473 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mf465_d787629b-df65-406a-8890-2a65d18ce8fd/extract-utilities/0.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.117026 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-mf465_d787629b-df65-406a-8890-2a65d18ce8fd/extract-content/0.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.164627 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mf465_d787629b-df65-406a-8890-2a65d18ce8fd/extract-content/0.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.436104 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mf465_d787629b-df65-406a-8890-2a65d18ce8fd/extract-content/0.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.454383 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mf465_d787629b-df65-406a-8890-2a65d18ce8fd/extract-utilities/0.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.691512 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/3.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.715624 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gkx59_b2b96b64-9445-48fe-bd76-e1a23f647129/registry-server/0.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.870779 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d68sd_98cb9d90-57ea-4bf2-8ee4-dbcf18e79293/marketplace-operator/2.log" Jan 30 12:24:23 crc kubenswrapper[4869]: I0130 12:24:23.953830 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t99sp_e9691505-782f-4384-ae50-7cd13749bfde/extract-utilities/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.136040 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:24:24 crc kubenswrapper[4869]: E0130 12:24:24.136254 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.225278 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t99sp_e9691505-782f-4384-ae50-7cd13749bfde/extract-content/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.250980 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t99sp_e9691505-782f-4384-ae50-7cd13749bfde/extract-utilities/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.300658 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t99sp_e9691505-782f-4384-ae50-7cd13749bfde/extract-content/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.336201 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mf465_d787629b-df65-406a-8890-2a65d18ce8fd/registry-server/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.516408 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-t99sp_e9691505-782f-4384-ae50-7cd13749bfde/extract-content/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.520875 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t99sp_e9691505-782f-4384-ae50-7cd13749bfde/extract-utilities/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.714042 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t99sp_e9691505-782f-4384-ae50-7cd13749bfde/registry-server/0.log" Jan 30 12:24:24 crc kubenswrapper[4869]: I0130 12:24:24.832265 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72xfv_52f6563d-9a78-4b18-b8a8-5505d2874a84/extract-utilities/0.log" Jan 30 12:24:25 crc kubenswrapper[4869]: I0130 12:24:25.027408 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72xfv_52f6563d-9a78-4b18-b8a8-5505d2874a84/extract-utilities/0.log" Jan 30 12:24:25 crc kubenswrapper[4869]: I0130 12:24:25.048241 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72xfv_52f6563d-9a78-4b18-b8a8-5505d2874a84/extract-content/0.log" Jan 30 12:24:25 crc kubenswrapper[4869]: I0130 12:24:25.067848 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72xfv_52f6563d-9a78-4b18-b8a8-5505d2874a84/extract-content/0.log" Jan 30 12:24:25 crc kubenswrapper[4869]: I0130 12:24:25.261081 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72xfv_52f6563d-9a78-4b18-b8a8-5505d2874a84/extract-content/0.log" Jan 30 12:24:25 crc kubenswrapper[4869]: I0130 12:24:25.269750 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72xfv_52f6563d-9a78-4b18-b8a8-5505d2874a84/extract-utilities/0.log" Jan 30 12:24:25 crc kubenswrapper[4869]: I0130 12:24:25.489467 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72xfv_52f6563d-9a78-4b18-b8a8-5505d2874a84/registry-server/0.log" Jan 30 12:24:38 crc kubenswrapper[4869]: I0130 12:24:38.132761 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:24:38 crc kubenswrapper[4869]: E0130 12:24:38.133403 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:24:50 crc kubenswrapper[4869]: I0130 12:24:50.138320 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:24:50 crc kubenswrapper[4869]: E0130 12:24:50.139189 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" 
podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:25:02 crc kubenswrapper[4869]: I0130 12:25:02.133001 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:25:02 crc kubenswrapper[4869]: E0130 12:25:02.133698 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:25:17 crc kubenswrapper[4869]: I0130 12:25:17.133470 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:25:17 crc kubenswrapper[4869]: E0130 12:25:17.134444 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:25:32 crc kubenswrapper[4869]: I0130 12:25:32.133289 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:25:32 crc kubenswrapper[4869]: E0130 12:25:32.134492 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:25:45 crc kubenswrapper[4869]: I0130 12:25:45.132869 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:25:45 crc kubenswrapper[4869]: E0130 12:25:45.133898 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:25:59 crc kubenswrapper[4869]: I0130 12:25:59.133180 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:25:59 crc kubenswrapper[4869]: E0130 12:25:59.134080 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:26:01 crc kubenswrapper[4869]: I0130 12:26:01.254020 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="0654b0c4-7447-4b20-abf9-c7978f7481da" containerID="778717c4c6abd6ac4a11a2a39620e5b2925257a2c286f46727a1afef91a80f95" exitCode=0 Jan 30 12:26:01 crc kubenswrapper[4869]: I0130 12:26:01.254140 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kzp9s/must-gather-cjntw" event={"ID":"0654b0c4-7447-4b20-abf9-c7978f7481da","Type":"ContainerDied","Data":"778717c4c6abd6ac4a11a2a39620e5b2925257a2c286f46727a1afef91a80f95"} Jan 30 12:26:01 crc kubenswrapper[4869]: I0130 12:26:01.255025 4869 scope.go:117] "RemoveContainer" containerID="778717c4c6abd6ac4a11a2a39620e5b2925257a2c286f46727a1afef91a80f95" Jan 30 12:26:02 crc kubenswrapper[4869]: I0130 12:26:02.011845 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kzp9s_must-gather-cjntw_0654b0c4-7447-4b20-abf9-c7978f7481da/gather/0.log" Jan 30 12:26:08 crc kubenswrapper[4869]: I0130 12:26:08.941301 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kzp9s/must-gather-cjntw"] Jan 30 12:26:08 crc kubenswrapper[4869]: I0130 12:26:08.943119 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-kzp9s/must-gather-cjntw" podUID="0654b0c4-7447-4b20-abf9-c7978f7481da" containerName="copy" containerID="cri-o://488ac7a5500e6c34e1ac9ef9d6a5cca571580240de03c96ca59c8a2315d95d0f" gracePeriod=2 Jan 30 12:26:08 crc kubenswrapper[4869]: I0130 12:26:08.952473 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kzp9s/must-gather-cjntw"] Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.318788 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kzp9s_must-gather-cjntw_0654b0c4-7447-4b20-abf9-c7978f7481da/copy/0.log" Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.319496 4869 generic.go:334] "Generic (PLEG): container finished" podID="0654b0c4-7447-4b20-abf9-c7978f7481da" containerID="488ac7a5500e6c34e1ac9ef9d6a5cca571580240de03c96ca59c8a2315d95d0f" exitCode=143 Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.319583 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6aba0a7203dd88ba9ac88cd96959574813932355b09363f270f39a72aa7f1e65" Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.330007 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kzp9s_must-gather-cjntw_0654b0c4-7447-4b20-abf9-c7978f7481da/copy/0.log" Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.330429 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.477136 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0654b0c4-7447-4b20-abf9-c7978f7481da-must-gather-output\") pod \"0654b0c4-7447-4b20-abf9-c7978f7481da\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.477336 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmd9n\" (UniqueName: \"kubernetes.io/projected/0654b0c4-7447-4b20-abf9-c7978f7481da-kube-api-access-dmd9n\") pod \"0654b0c4-7447-4b20-abf9-c7978f7481da\" (UID: \"0654b0c4-7447-4b20-abf9-c7978f7481da\") " Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.485475 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0654b0c4-7447-4b20-abf9-c7978f7481da-kube-api-access-dmd9n" (OuterVolumeSpecName: "kube-api-access-dmd9n") pod "0654b0c4-7447-4b20-abf9-c7978f7481da" (UID: "0654b0c4-7447-4b20-abf9-c7978f7481da"). InnerVolumeSpecName "kube-api-access-dmd9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.581445 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmd9n\" (UniqueName: \"kubernetes.io/projected/0654b0c4-7447-4b20-abf9-c7978f7481da-kube-api-access-dmd9n\") on node \"crc\" DevicePath \"\"" Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.594471 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0654b0c4-7447-4b20-abf9-c7978f7481da-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "0654b0c4-7447-4b20-abf9-c7978f7481da" (UID: "0654b0c4-7447-4b20-abf9-c7978f7481da"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 12:26:09 crc kubenswrapper[4869]: I0130 12:26:09.682928 4869 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0654b0c4-7447-4b20-abf9-c7978f7481da-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 30 12:26:10 crc kubenswrapper[4869]: I0130 12:26:10.139336 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:26:10 crc kubenswrapper[4869]: E0130 12:26:10.139841 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:26:10 crc kubenswrapper[4869]: I0130 12:26:10.153606 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0654b0c4-7447-4b20-abf9-c7978f7481da" path="/var/lib/kubelet/pods/0654b0c4-7447-4b20-abf9-c7978f7481da/volumes" Jan 30 12:26:10 crc kubenswrapper[4869]: I0130 12:26:10.327413 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kzp9s/must-gather-cjntw" Jan 30 12:26:24 crc kubenswrapper[4869]: I0130 12:26:24.133460 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:26:24 crc kubenswrapper[4869]: E0130 12:26:24.134377 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:26:37 crc kubenswrapper[4869]: I0130 12:26:37.133798 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:26:37 crc kubenswrapper[4869]: E0130 12:26:37.134713 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:26:48 crc kubenswrapper[4869]: I0130 12:26:48.147846 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:26:48 crc kubenswrapper[4869]: E0130 12:26:48.149671 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:26:59 crc kubenswrapper[4869]: I0130 12:26:59.132813 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:26:59 crc kubenswrapper[4869]: E0130 12:26:59.133765 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:27:13 crc kubenswrapper[4869]: I0130 12:27:13.133380 4869 scope.go:117] "RemoveContainer" containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:27:13 crc kubenswrapper[4869]: E0130 12:27:13.134737 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-99lr2_openshift-machine-config-operator(ef13186b-7f82-4025-97e3-d899be8c207f)\"" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" podUID="ef13186b-7f82-4025-97e3-d899be8c207f" Jan 30 12:27:25 crc kubenswrapper[4869]: I0130 12:27:25.133352 4869 scope.go:117] "RemoveContainer" 
containerID="eafb675131afe0b400f06a12d8c8da4a08c4df49efcfac66ca5dc2b2ce3a64a6" Jan 30 12:27:25 crc kubenswrapper[4869]: I0130 12:27:25.939860 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-99lr2" event={"ID":"ef13186b-7f82-4025-97e3-d899be8c207f","Type":"ContainerStarted","Data":"5603e238bd04564780f6927546933c5c7ad47511f6a21768782ea8ed4cbea896"} Jan 30 12:28:02 crc kubenswrapper[4869]: I0130 12:28:02.042912 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-skkxw"] Jan 30 12:28:02 crc kubenswrapper[4869]: I0130 12:28:02.055986 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-skkxw"] Jan 30 12:28:02 crc kubenswrapper[4869]: I0130 12:28:02.067078 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9a6e-account-create-update-fsr8d"] Jan 30 12:28:02 crc kubenswrapper[4869]: I0130 12:28:02.075409 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9a6e-account-create-update-fsr8d"] Jan 30 12:28:02 crc kubenswrapper[4869]: I0130 12:28:02.144624 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5db9e4f5-3a4b-4021-b569-db288f1501f0" path="/var/lib/kubelet/pods/5db9e4f5-3a4b-4021-b569-db288f1501f0/volumes" Jan 30 12:28:02 crc kubenswrapper[4869]: I0130 12:28:02.145209 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bf3ebb4-ddb0-4221-a918-11657d547507" path="/var/lib/kubelet/pods/9bf3ebb4-ddb0-4221-a918-11657d547507/volumes" Jan 30 12:28:08 crc kubenswrapper[4869]: I0130 12:28:08.032869 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-sh22r"] Jan 30 12:28:08 crc kubenswrapper[4869]: I0130 12:28:08.038693 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-sh22r"] Jan 30 12:28:08 crc kubenswrapper[4869]: I0130 12:28:08.142564 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b06a4bb-d363-4877-a91a-e42d56568285" path="/var/lib/kubelet/pods/9b06a4bb-d363-4877-a91a-e42d56568285/volumes" Jan 30 12:28:16 crc kubenswrapper[4869]: I0130 12:28:16.444528 4869 scope.go:117] "RemoveContainer" containerID="488ac7a5500e6c34e1ac9ef9d6a5cca571580240de03c96ca59c8a2315d95d0f" Jan 30 12:28:16 crc kubenswrapper[4869]: I0130 12:28:16.463847 4869 scope.go:117] "RemoveContainer" containerID="7de40206275454617b976728062ff3694a4e99042e112e32f5b9ddf526ea43dd" Jan 30 12:28:16 crc kubenswrapper[4869]: I0130 12:28:16.480761 4869 scope.go:117] "RemoveContainer" containerID="778717c4c6abd6ac4a11a2a39620e5b2925257a2c286f46727a1afef91a80f95" Jan 30 12:28:16 crc kubenswrapper[4869]: I0130 12:28:16.554697 4869 scope.go:117] "RemoveContainer" containerID="83f6a94b8e1c0a32362066114f2fae94a1bfa36287e2ab31e30749a064128142" Jan 30 12:28:16 crc kubenswrapper[4869]: I0130 12:28:16.575146 4869 scope.go:117] "RemoveContainer" containerID="ba325cca243ce97470dad2e5d5da64e22b7ee6482551b1062176cd4a5c1c5cae" Jan 30 12:28:22 crc kubenswrapper[4869]: I0130 12:28:22.027310 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dcp9h"] Jan 30 12:28:22 crc kubenswrapper[4869]: I0130 12:28:22.035231 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dcp9h"] Jan 30 12:28:22 crc kubenswrapper[4869]: I0130 12:28:22.144585 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="787f8551-8c15-4948-802e-6f768a0eae9f" 
path="/var/lib/kubelet/pods/787f8551-8c15-4948-802e-6f768a0eae9f/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515137122016024443 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015137122016017360 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015137106366016515 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015137106366015465 5ustar corecore